Diffstat (limited to 'MPE/spark/iplearning')
 -rw-r--r--  MPE/spark/iplearning/application.properties                                |   44
 -rw-r--r--  MPE/spark/iplearning/ip-learning-spark.jar                                 |  bin 0 -> 125276478 bytes
 -rw-r--r--  MPE/spark/iplearning/iplearning.sh                                         |   53
 -rw-r--r--  MPE/spark/iplearning/iplearning_monitor.sh                                 |   56
 -rw-r--r--  MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log        | 9683
 -rw-r--r--  MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log       | 9640
 -rw-r--r--  MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log | 9673
 -rw-r--r--  MPE/spark/iplearning/runnum                                                |    1
 -rw-r--r--  MPE/spark/iplearning/upconfig.sh                                           |    7
9 files changed, 29157 insertions, 0 deletions
diff --git a/MPE/spark/iplearning/application.properties b/MPE/spark/iplearning/application.properties
new file mode 100644
index 0000000..9755a8f
--- /dev/null
+++ b/MPE/spark/iplearning/application.properties
@@ -0,0 +1,44 @@
+#Spark job configuration
+spark.sql.shuffle.partitions=30
+spark.executor.memory=1g
+spark.executor.cores=5
+spark.cores.max=30
+spark.app.name=test
+spark.network.timeout=300s
+spark.serializer=org.apache.spark.serializer.KryoSerializer
+master=spark://192.168.20.223:7077
+#Configuration for reading from ClickHouse with Spark
+spark.read.clickhouse.url=jdbc:clickhouse://192.168.20.252:8124/tsg_galaxy_v3
+spark.read.clickhouse.driver=ru.yandex.clickhouse.ClickHouseDriver
+spark.read.clickhouse.user=default
+spark.read.clickhouse.password=galaxy2019
+spark.read.clickhouse.numPartitions=5
+spark.read.clickhouse.fetchsize=10000
+spark.read.clickhouse.partitionColumn=LAST_FOUND_TIME
+spark.read.clickhouse.session.table=session_record
+spark.read.clickhouse.radius.table=radius_record
+clickhouse.socket.timeout=3600000
+#arangoDB configuration
+arangoDB.host=192.168.20.222
+arangoDB.port=8529
+arangoDB.user=upsert
+arangoDB.password=galaxy2019
+arangoDB.DB.name=tsg_galaxy_v3
+arangoDB.ttl=3600
+
+thread.pool.number=10
+
+#Time-range mode for reading from ClickHouse; 0: read the past hour; 1: use the explicit range below
+clickhouse.time.limit.type=0
+read.clickhouse.max.time=1608518990
+read.clickhouse.min.time=1604851201
+
+update.arango.batch=10000
+
+distinct.client.ip.num=10000
+recent.count.hour=24
+
+update.interval=3600
+arangodb.total.num=100000000
+#Time range for reading radius records; matches the radius job's execution period, in minutes
+read.radius.granularity=-60
diff --git a/MPE/spark/iplearning/ip-learning-spark.jar b/MPE/spark/iplearning/ip-learning-spark.jar
new file mode 100644
index 0000000..6a5af10
--- /dev/null
+++ b/MPE/spark/iplearning/ip-learning-spark.jar
Binary files differ
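In application.properties above, clickhouse.time.limit.type switches between a rolling one-hour window (0) and the fixed epoch-second bounds in read.clickhouse.min.time / read.clickhouse.max.time (1). The selection itself happens inside ip-learning-spark.jar; the shell sketch below only illustrates the assumed semantics of the two modes, and every variable name in it is hypothetical, not part of this commit.

    #!/bin/bash
    # Illustrative only: how clickhouse.time.limit.type is understood to pick
    # the LAST_FOUND_TIME window. Variable names here are hypothetical.
    time_limit_type=0          # from clickhouse.time.limit.type
    fixed_min=1604851201       # from read.clickhouse.min.time
    fixed_max=1608518990       # from read.clickhouse.max.time

    if [ "$time_limit_type" -eq 0 ]; then
        max_time=$(date +%s)               # now
        min_time=$((max_time - 3600))      # past hour
    else
        min_time=$fixed_min
        max_time=$fixed_max
    fi
    echo "WHERE LAST_FOUND_TIME BETWEEN $min_time AND $max_time"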
diff --git a/MPE/spark/iplearning/iplearning.sh b/MPE/spark/iplearning/iplearning.sh
new file mode 100644
index 0000000..48f64b1
--- /dev/null
+++ b/MPE/spark/iplearning/iplearning.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+source /etc/profile
+
+jar_name="ip-learning-spark.jar"
+
+spark_home=/data/tsg/olap/spark-2.2.3-bin-hadoop2.7
+
+isrun=`ps -ef | grep -w "ip-learning-spark.jar" | grep -v grep | wc -l`
+
+if [ ! -f "$spark_home/iplearning/runnum" ];then
+    echo 0 > $spark_home/iplearning/runnum
+fi
+
+if [ ! -d "$spark_home/iplearning/logs" ];then
+    mkdir -p $spark_home/iplearning/logs
+    mkdir -p $spark_home/iplearning/logs/IpLearningApplication
+    mkdir -p $spark_home/iplearning/logs/IpRecommendApplication
+    mkdir -p $spark_home/iplearning/logs/SubscriberRecommendApplication
+fi
+
+#start spark jobs
+function startJob(){
+
+echo "IpLearningApplication program started - `date "+%Y-%m-%d %H:%M:%S"`" >> $spark_home/iplearning/logs/IpLearningApplication/running-`date +'%Y-%m'`.log
+
+nohup ${spark_home}/bin/spark-submit --class cn.ac.iie.main.IpLearningApplication --driver-java-options "-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" --conf spark.executor.extraJavaOptions="-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" ${spark_home}/iplearning/$jar_name >> $spark_home/iplearning/logs/IpLearningApplication/running-`date +'%Y-%m'`.log
+
+echo "IpRecommendApplication program started - `date "+%Y-%m-%d %H:%M:%S"`" >> $spark_home/iplearning/logs/IpRecommendApplication/running-`date +'%Y-%m'`.log
+
+nohup ${spark_home}/bin/spark-submit --class cn.ac.iie.main.IpRecommendApplication --driver-java-options "-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" --conf spark.executor.extraJavaOptions="-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" ${spark_home}/iplearning/$jar_name >> $spark_home/iplearning/logs/IpRecommendApplication/running-`date +'%Y-%m'`.log
+
+echo "SubscriberRecommendApplication program started - `date "+%Y-%m-%d %H:%M:%S"`" >> $spark_home/iplearning/logs/SubscriberRecommendApplication/running-`date +'%Y-%m'`.log
+
+nohup ${spark_home}/bin/spark-submit --class cn.ac.iie.main.SubscriberRecommendApplication --driver-java-options "-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" --conf spark.executor.extraJavaOptions="-Dlog4j.configuration=file:$spark_home/conf/log4j-task.properties" ${spark_home}/iplearning/$jar_name >> $spark_home/iplearning/logs/SubscriberRecommendApplication/running-`date +'%Y-%m'`.log
+
+}
+
+if [[ $isrun -ge 1 ]];then
+    OLD_NUM=`cat $spark_home/iplearning/runnum`
+    RESTART_NUM=`expr $OLD_NUM + 1`
+    echo $RESTART_NUM > $spark_home/iplearning/runnum
+
+    if [ $RESTART_NUM -eq "3" ];then
+        pgrep -f "ip-learning-spark.jar" | xargs kill -9
+        startJob
+        echo 1 > $spark_home/iplearning/runnum
+    fi
+else
+    startJob
+    echo 1 > $spark_home/iplearning/runnum
+fi
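iplearning.sh appears to be driven by a scheduler (the logs below show hourly runs): the runnum file counts consecutive invocations that still see the jar running, and on the third consecutive hit the jobs are killed and resubmitted. A minimal standalone sketch of that watchdog pattern follows; COUNTER_FILE, JOB_PATTERN and start_job are hypothetical placeholders, not names from this commit.

    #!/bin/bash
    # Sketch of the runnum-style stuck-job watchdog used by iplearning.sh.
    COUNTER_FILE=/tmp/runnum
    JOB_PATTERN="my-long-job.jar"
    MAX_HITS=3

    start_job() { echo "resubmitting job"; }   # stand-in for the spark-submit calls

    if pgrep -f "$JOB_PATTERN" >/dev/null; then
        # job still running: bump the consecutive-hit counter
        hits=$(( $(cat "$COUNTER_FILE" 2>/dev/null || echo 0) + 1 ))
        echo "$hits" > "$COUNTER_FILE"
        if [ "$hits" -ge "$MAX_HITS" ]; then
            # considered stuck after MAX_HITS checks: kill and restart
            pgrep -f "$JOB_PATTERN" | xargs -r kill -9
            start_job
            echo 1 > "$COUNTER_FILE"
        fi
    else
        start_job
        echo 1 > "$COUNTER_FILE"
    fi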
diff --git a/MPE/spark/iplearning/iplearning_monitor.sh b/MPE/spark/iplearning/iplearning_monitor.sh
new file mode 100644
index 0000000..59e4c3d
--- /dev/null
+++ b/MPE/spark/iplearning/iplearning_monitor.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+
+source /etc/profile
+
+#iplearning log file path
+#iplearning_log_file='/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/logs/ip-learning-application.log'
+iplearning_log_file="/data/tsg/olap/spark-2.2.3-bin-hadoop2.7/logs/ip-learning-application-$(date +'%Y-%m-%d').log"
+iplearning_monitor_prom_file="/data/tsg/olap/galaxy/volumes/node-exporter/prom/iplearning_monitor.prom"
+
+#arangoDB R_LOCATE_FQDN2IP count
+fqdn_locate_ip_arango_count=`sed -n '/RETURN LENGTH(R_LOCATE_FQDN2IP)/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo "fqdn_locate_ip_arango_count $fqdn_locate_ip_arango_count" > $iplearning_monitor_prom_file
+
+#time spent reading R_LOCATE_FQDN2IP from arangoDB
+read_fqdn_locate_ip_arango_time=`sed -n '/读取R_LOCATE_FQDN2IP arangoDB 共耗时/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo "read_fqdn_locate_ip_arango_time $read_fqdn_locate_ip_arango_time" >> $iplearning_monitor_prom_file
+
+#clickhouse fqdn-locate-ip edge relation count
+fqdn_locate_ip_ck_count=`sed -n '/读取R_LOCATE_FQDN2IP clickhouse成功/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo fqdn_locate_ip_ck_count $fqdn_locate_ip_ck_count >> $iplearning_monitor_prom_file
+
+#time spent updating R_LOCATE_FQDN2IP
+update_fqdn_locate_ip_time=`sed -n '/更新R_LOCATE_FQDN2IP 时间/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo update_fqdn_locate_ip_time $update_fqdn_locate_ip_time >> $iplearning_monitor_prom_file
+
+#arangoDB SUBSCRIBER count
+subid_arango_count=`sed -n '/RETURN LENGTH(SUBSCRIBER)/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_arango_count $subid_arango_count >> $iplearning_monitor_prom_file
+
+#time spent reading SUBSCRIBER from arangoDB
+read_subid_arango_time=`sed -n '/读取SUBSCRIBER arangoDB 共耗时/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo read_subid_arango_time $read_subid_arango_time >> $iplearning_monitor_prom_file
+
+#clickhouse subscriberid vertex count
+subid_ck_count=`sed -n '/读取SUBSCRIBER clickhouse成功/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_ck_count $subid_ck_count >> $iplearning_monitor_prom_file
+
+#time spent updating SUBSCRIBER
+update_subid_time=`sed -n '/更新SUBSCRIBER 时间/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo update_subid_time $update_subid_time >> $iplearning_monitor_prom_file
+
+#arangoDB R_LOCATE_SUBSCRIBER2IP count
+subid_locate_ip_arango_count=`sed -n '/RETURN LENGTH(R_LOCATE_SUBSCRIBER2IP)/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_locate_ip_arango_count $subid_locate_ip_arango_count >> $iplearning_monitor_prom_file
+
+#time spent reading R_LOCATE_SUBSCRIBER2IP from arangoDB
+read_subid_locate_ip_arango_time=`sed -n '/读取R_LOCATE_SUBSCRIBER2IP arangoDB 共耗时/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo read_subid_locate_ip_arango_time $read_subid_locate_ip_arango_time >> $iplearning_monitor_prom_file
+
+#clickhouse subscriberid-locate-ip edge relation count
+subid_locate_ip_ck_count=`sed -n '/读取R_LOCATE_SUBSCRIBER2IP clickhouse成功/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo subid_locate_ip_ck_count $subid_locate_ip_ck_count >> $iplearning_monitor_prom_file
+
+#time spent updating R_LOCATE_SUBSCRIBER2IP
+update_subid_locate_ip_time=`sed -n '/更新R_LOCATE_SUBSCRIBER2IP 时间/p' $iplearning_log_file | awk -v FS=':' '{print $2}' | awk '{print $1}' | awk 'END {print}'`
+echo update_subid_locate_ip_time $update_subid_locate_ip_time >> $iplearning_monitor_prom_file
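iplearning_monitor.sh repeats the same sed | awk pipeline for every metric, pulling the last matching value out of the application log and appending it to a .prom file for node-exporter's textfile collector. A hedged sketch of how that repetition could be factored into one helper follows; extract_metric and the sample paths are illustrative only, not part of this commit.

    #!/bin/bash
    # Sketch: one helper in place of twelve copies of the sed|awk pipeline.
    log_file=/path/to/ip-learning-application.log      # illustrative path
    prom_file=/path/to/iplearning_monitor.prom          # illustrative path

    extract_metric() {
        local metric_name=$1 pattern=$2 value
        # last line matching the pattern; value is the word after the first ':'
        value=$(sed -n "/$pattern/p" "$log_file" | awk -F':' '{print $2}' | awk '{print $1}' | awk 'END {print}')
        echo "$metric_name $value" >> "$prom_file"
    }

    : > "$prom_file"   # truncate once, then append every metric
    extract_metric fqdn_locate_ip_arango_count 'RETURN LENGTH(R_LOCATE_FQDN2IP)'
    extract_metric subid_arango_count          'RETURN LENGTH(SUBSCRIBER)'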
diff --git a/MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log b/MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log
new file mode 100644
index 0000000..49f701e
--- /dev/null
+++ b/MPE/spark/iplearning/logs/IpLearningApplication/running-2023-07.log
@@ -0,0 +1,9683 @@
+IpLearningApplication program started - 2023-07-12 04:40:31
+[2023-07-12 04:41:33+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 04:41:33+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    at scala.Predef$.require(Predef.scala:224)
+    at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+    at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+    at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program started - 2023-07-12 05:00:00
+[2023-07-12 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 06:00:00
+[2023-07-12 06:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+    at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+    at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
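Every entry in this log fails the same way: the driver never registers with the standalone master configured as master=spark://192.168.20.223:7077 in application.properties, so SparkContext initialization dies, usually with the MetricsSystem requirement failure and occasionally with the NullPointerException above. A quick reachability probe like the sketch below, which is illustrative only and not part of the committed scripts, can separate an unreachable master from an application problem.

    #!/bin/bash
    # Illustrative probe: check that the standalone master port from
    # application.properties accepts TCP connections before blaming the job.
    master_host=192.168.20.223
    master_port=7077
    if timeout 5 bash -c "</dev/tcp/$master_host/$master_port"; then
        echo "spark master $master_host:$master_port is reachable"
    else
        echo "spark master $master_host:$master_port unreachable, matching 'All masters are unresponsive!'"
    fi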
+IpLearningApplication program started - 2023-07-12 07:00:00
+[2023-07-12 07:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 08:00:00
+[2023-07-12 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 09:00:00
+[2023-07-12 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 10:00:00
+[2023-07-12 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 11:00:00
+[2023-07-12 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 12:00:00
+[2023-07-12 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 13:00:00
+[2023-07-12 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 14:00:00
+[2023-07-12 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 15:00:00
+[2023-07-12 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 16:00:00
+[2023-07-12 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+IpLearningApplication program started - 2023-07-12 17:00:00
+[2023-07-12 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 18:00:00
+[2023-07-12 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 19:00:00
+[2023-07-12 19:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 20:00:00
+[2023-07-12 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+IpLearningApplication program started - 2023-07-12 21:00:00
+[2023-07-12 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 22:00:00
+[2023-07-12 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program started - 2023-07-12 23:00:00
+[2023-07-12 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-12 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 00:00:00 +[2023-07-13 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 01:00:00 +[2023-07-13 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 02:00:00 +[2023-07-13 02:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 03:00:00 +[2023-07-13 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 04:00:00 +[2023-07-13 04:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 05:00:00 +[2023-07-13 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 06:00:00 +[2023-07-13 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 07:00:00 +[2023-07-13 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 08:00:00 +[2023-07-13 08:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 09:00:00 +[2023-07-13 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 10:00:00 +[2023-07-13 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 11:00:00 +[2023-07-13 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 12:00:00 +[2023-07-13 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 13:00:00 +[2023-07-13 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 14:00:00 +[2023-07-13 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 15:00:00 +[2023-07-13 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 16:00:00 +[2023-07-13 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 17:00:00 +[2023-07-13 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-13 18:00:00 +[2023-07-13 18:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-13 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-13 19:00:00
+[2023-07-13 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-13 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-13 20:00:00
+[2023-07-13 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-13 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-13 21:00:00
+[2023-07-13 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-13 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-13 22:00:00
+[2023-07-13 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-13 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-13 23:00:00
+[2023-07-13 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-13 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 00:00:00
+[2023-07-14 00:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 01:00:00
+[2023-07-14 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 02:00:00
+[2023-07-14 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 03:00:00
+[2023-07-14 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 03:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 04:00:00
+[2023-07-14 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 05:00:00
+[2023-07-14 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 06:00:00
+[2023-07-14 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 07:00:00
+[2023-07-14 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 08:00:00
+[2023-07-14 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 09:00:00
+[2023-07-14 09:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 10:00:00
+[2023-07-14 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 11:00:00
+[2023-07-14 11:01:03+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 11:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 12:00:00
+[2023-07-14 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 13:00:00
+[2023-07-14 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 14:00:00
+[2023-07-14 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-14 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-14 15:00:00
+[2023-07-14 15:01:03+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-14 15:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 16:00:00 +[2023-07-14 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 17:00:00 +[2023-07-14 17:01:03+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 17:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 18:00:00 +[2023-07-14 18:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 19:00:00 +[2023-07-14 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 20:00:00 +[2023-07-14 20:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 21:00:00 +[2023-07-14 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 22:00:00 +[2023-07-14 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-14 23:00:00 +[2023-07-14 23:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 00:00:00 +[2023-07-15 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 01:00:00 +[2023-07-15 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 02:00:00 +[2023-07-15 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 03:00:00 +[2023-07-15 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 04:00:00 +[2023-07-15 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 05:00:00 +[2023-07-15 05:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 06:00:00 +[2023-07-15 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 07:00:00 +[2023-07-15 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 08:00:00 +[2023-07-15 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 09:00:00 +[2023-07-15 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 10:00:00 +[2023-07-15 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 11:00:00 +[2023-07-15 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-15 12:00:00 +[2023-07-15 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 13:00:00
+[2023-07-15 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 14:00:00
+[2023-07-15 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 15:00:00
+[2023-07-15 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 16:00:00
+[2023-07-15 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 17:00:00
+[2023-07-15 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 18:00:00
+[2023-07-15 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 19:00:00
+[2023-07-15 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 20:00:00
+[2023-07-15 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 21:00:00
+[2023-07-15 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 22:00:00
+[2023-07-15 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-15 23:00:00
+[2023-07-15 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-15 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 00:00:00
+[2023-07-16 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 01:00:00
+[2023-07-16 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 02:00:00
+[2023-07-16 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 03:00:00
+[2023-07-16 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 04:00:00
+[2023-07-16 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 05:00:00
+[2023-07-16 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 06:00:00
+[2023-07-16 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 07:00:00
+[2023-07-16 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 08:00:00
+[2023-07-16 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-16 09:00:00
+[2023-07-16 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-16 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 10:00:00
+[2023-07-16 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 11:00:00
+[2023-07-16 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    ... (remainder identical to the 2023-07-16 08:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 12:00:00
+[2023-07-16 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 13:00:00
+[2023-07-16 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 14:00:00
+[2023-07-16 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 15:00:00
+[2023-07-16 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    ... (remainder identical to the 2023-07-16 08:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 16:00:00
+[2023-07-16 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+    at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+    at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+    at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program started - 2023-07-16 17:00:00
+[2023-07-16 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 18:00:00
+[2023-07-16 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 19:00:00
+[2023-07-16 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 20:00:00
+[2023-07-16 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 21:00:00
+[2023-07-16 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 22:00:00
+[2023-07-16 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-16 23:00:00
+[2023-07-16 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-16 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 00:00:00
+[2023-07-17 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    ... (remainder identical to the 2023-07-16 08:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 01:00:00
+[2023-07-17 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 02:00:00
+[2023-07-17 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 03:00:00
+[2023-07-17 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 04:00:00
+[2023-07-17 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 05:00:00
+[2023-07-17 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 06:00:00
+[2023-07-17 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 07:00:00
+[2023-07-17 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-17 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-16 07:01:02 entry above; omitted)
+IpLearningApplication program started - 2023-07-17 08:00:00
+[2023-07-17 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-17 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 09:00:00 +[2023-07-17 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 10:00:00 +[2023-07-17 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 11:00:00 +[2023-07-17 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 12:00:00 +[2023-07-17 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 13:00:00 +[2023-07-17 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 14:00:00 +[2023-07-17 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 15:00:00 +[2023-07-17 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 16:00:00 +[2023-07-17 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 17:00:00 +[2023-07-17 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 18:00:00 +[2023-07-17 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 19:00:00 +[2023-07-17 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 20:00:00 +[2023-07-17 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 21:00:00 +[2023-07-17 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 22:00:00 +[2023-07-17 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-17 23:00:00 +[2023-07-17 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-18 00:00:00 +[2023-07-18 00:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-18 01:00:00 +[2023-07-18 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-18 02:00:00 +[2023-07-18 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-18 03:00:00 +[2023-07-18 03:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
[Editorial condensation: the identical launch/failure cycle repeats every hour from 2023-07-18 03:01 through 2023-07-19 01:01. Each hour the launcher logs "IpLearningApplication program started - HH:00:00", the driver's registration attempts against the standalone master give up about a minute later ("Application has been killed. Reason: All masters are unresponsive! Giving up."), and the main thread then logs "Error initializing SparkContext." with one of three recurring traces, all through the same cn.ac.iie call path shown above:
 - java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem (as at 01:01 above) - at 03, 06, 07, 09, 11, 12, 13, 15, 17, 18, 19, 20, 21, 22 and 23 o'clock on 07-18, and 00 and 01 o'clock on 07-19;
 - java.lang.NullPointerException at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) (as at 02:01 above) - at 08 and 14 o'clock;
 - java.lang.NullPointerException at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) - at 04, 05, 10 and 16 o'clock.]
+IpLearningApplication program started - 2023-07-19 02:00:00
+[2023-07-19 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-19 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 03:00:00 +[2023-07-19 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 04:00:00 +[2023-07-19 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 05:00:00 +[2023-07-19 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 06:00:00 +[2023-07-19 06:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 07:00:00 +[2023-07-19 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 08:00:00 +[2023-07-19 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 09:00:00 +[2023-07-19 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 10:00:00 +[2023-07-19 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 10:01:03+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 11:00:00 +[2023-07-19 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 12:00:00 +[2023-07-19 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 13:00:00 +[2023-07-19 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 14:00:00 +[2023-07-19 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 15:00:00 +[2023-07-19 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 16:00:00 +[2023-07-19 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 17:00:00 +[2023-07-19 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 18:00:00 +[2023-07-19 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 19:00:00 +[2023-07-19 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-19 20:00:00 +[2023-07-19 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[... the identical IllegalArgumentException stack trace recurs at 2023-07-19 20:01:02 and 21:01:02, each hour logging the "IpLearningApplication program running" marker followed by the "All masters are unresponsive! Giving up." error ...]
+[2023-07-19 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-19 23:00:00
+[2023-07-19 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-19 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 00:00:00 +[2023-07-20 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 01:00:00 +[2023-07-20 01:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 02:00:00 +[2023-07-20 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 03:00:00 +[2023-07-20 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 04:00:00 +[2023-07-20 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 04:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 05:00:00 +[2023-07-20 05:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 05:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 06:00:00 +[2023-07-20 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 07:00:00 +[2023-07-20 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 07:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 08:00:00 +[2023-07-20 08:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 08:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 09:00:00 +[2023-07-20 09:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 09:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 10:00:00 +[2023-07-20 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 11:00:00 +[2023-07-20 11:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 12:00:00 +[2023-07-20 12:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 13:00:00 +[2023-07-20 13:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 14:00:00 +[2023-07-20 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 15:00:00 +[2023-07-20 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 16:00:00 +[2023-07-20 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-20 17:00:00 +[2023-07-20 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
[The identical failure repeated on every hourly run from 2023-07-20 17:00 through 2023-07-21 15:00: the registration retry thread logs "Application has been killed. Reason: All masters are unresponsive! Giving up." and SparkContext initialization then aborts with the IllegalArgumentException trace shown above. The runs of 2023-07-20 17:00 and 23:00 and of 2023-07-21 02:00 and 06:00 aborted instead with java.lang.NullPointerException at org.apache.spark.SparkContext.<init>(SparkContext.scala:567); from org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) down, those traces are identical to the one above. Duplicate entries omitted.]
+IpLearningApplication program started - 2023-07-21 16:00:00
+[2023-07-21 16:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-21 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 17:00:00 +[2023-07-21 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 18:00:00 +[2023-07-21 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 19:00:00 +[2023-07-21 19:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 20:00:00 +[2023-07-21 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 21:00:00 +[2023-07-21 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 22:00:00 +[2023-07-21 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-21 23:00:00 +[2023-07-21 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-22 00:00:00 +[2023-07-22 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-22 01:00:00 +[2023-07-22 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-22 02:00:00 +[2023-07-22 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-22 03:00:00 +[2023-07-22 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-22 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication 程序运行 - 2023-07-22 04:00:00
+[2023-07-22 04:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 06:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+	at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+	at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication 程序运行 - 2023-07-22 07:00:00
+[2023-07-22 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
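The IllegalArgumentException variant ("Can only call getServletHandlers on a running MetricsSystem") is consistent with the same root cause: registration with the master fails, the context is torn down while the main thread is still constructing it, and the constructor appears to trip over the never-started MetricsSystem at SparkContext.scala:524 instead of reaching line 567. Since every entry here is a fresh hourly spark-submit against an unreachable master, a pre-flight TCP probe could let the wrapper skip the submit and alert rather than append another dead trace. A minimal sketch, hypothetical only (MasterProbe and its arguments are illustrative; the master host and port would be passed in rather than hard-coded):

import java.net.{InetSocketAddress, Socket}

import scala.util.Try

// Hypothetical pre-flight probe run before spark-submit: exits non-zero when
// the standalone master's TCP port does not accept a connection in time.
object MasterProbe {
  def reachable(host: String, port: Int, timeoutMs: Int = 3000): Boolean =
    Try {
      val socket = new Socket()
      try socket.connect(new InetSocketAddress(host, port), timeoutMs)
      finally socket.close()
    }.isSuccess

  def main(args: Array[String]): Unit =
    if (!reachable(args(0), args(1).toInt)) {
      System.err.println(s"Spark master ${args(0)}:${args(1)} unreachable; skipping submit")
      sys.exit(1) // lets a wrapper script alert instead of logging another dead trace
    }
}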
+IpLearningApplication program running - 2023-07-22 14:00:00
+[2023-07-22 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[... the 14:01:02 failure and the hourly runs from 15:00 through 22:00 (2023-07-22) are elided: each logs the same run header, "All masters are unresponsive! Giving up.", and "Error initializing SparkContext." with the IllegalArgumentException trace from the 13:01:02 entry ...]
+IpLearningApplication program running - 2023-07-22 23:00:00
+[2023-07-22 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[... the 23:01 (2023-07-22) through 02:01 (2023-07-23) failures and the 00:00-02:00 run headers are elided: all identical, with the NullPointerException trace from the 10:01:02 entry ...]
+IpLearningApplication program running - 2023-07-23 03:00:00
+[2023-07-23 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[... the 03:01-05:01 failures and the 04:00-05:00 run headers are elided: all identical, with the IllegalArgumentException trace from the 13:01:02 entry ...]
+IpLearningApplication program running - 2023-07-23 06:00:00
+[2023-07-23 06:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[... the 06:01 failure is elided: identical, with the NullPointerException trace from the 10:01:02 entry ...]
+IpLearningApplication program running - 2023-07-23 07:00:00
+[2023-07-23 07:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[... the 07:01-09:01 failures and the 08:00-09:00 run headers are elided: all identical, with the IllegalArgumentException trace from the 13:01:02 entry ...]
+IpLearningApplication program running - 2023-07-23 10:00:00
+[2023-07-23 10:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 10:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
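Annotation: the 10:01 run shows the same outage surfacing as a NullPointerException instead. The appclient thread stops the half-built SparkContext once all masters are declared dead, and this teardown races the constructor on the main thread, so the exception type depends on how far initialization got (here BlockManagerMaster.registerBlockManager; in the 23:01 run further below, SparkContext.<init> itself). Because the surfaced type varies, an in-driver mitigation has to treat every failure from getOrCreate uniformly. A sketch of that shape only, assuming an in-process retry is acceptable for the Spark version in use (restarting the submitting process is the conservative alternative):

import org.apache.spark.sql.SparkSession
import scala.util.{Failure, Success, Try}

// Sketch only: retry SparkSession creation with exponential backoff. The
// surfaced exception varies run to run (IllegalArgumentException, NPE,
// SparkException), so we retry on any non-fatal failure rather than
// matching exception types.
object SessionWithRetry {
  @annotation.tailrec
  def create(attempts: Int, backoffMs: Long): SparkSession =
    Try(SparkSession.builder().getOrCreate()) match {
      case Success(spark) => spark
      case Failure(e) if attempts > 1 =>
        System.err.println(s"SparkSession init failed (${e.getClass.getName}); retrying in $backoffMs ms")
        Thread.sleep(backoffMs)
        create(attempts - 1, backoffMs * 2)
      case Failure(e) => throw e
    }
}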
+IpLearningApplication program running - 2023-07-23 11:00:00
+[2023-07-23 11:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 11:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 12:00:00
+[2023-07-23 12:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 12:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 13:00:00
+[2023-07-23 13:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 13:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 14:00:00
+[2023-07-23 14:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 14:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 15:00:00
+[2023-07-23 15:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 15:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 16:00:00
+[2023-07-23 16:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 16:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 17:00:00
+[2023-07-23 17:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 17:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 18:00:00
+[2023-07-23 18:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 18:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 19:00:00
+[2023-07-23 19:01:01+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 19:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 20:00:00
+[2023-07-23 20:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 20:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+org.apache.spark.SparkException: Exception thrown in awaitResult:
+ at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:205)
+ at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+Caused by: org.apache.spark.SparkException: Could not find BlockManagerMaster.
+ at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:157)
+ at org.apache.spark.rpc.netty.Dispatcher.postLocalMessage(Dispatcher.scala:132)
+ at org.apache.spark.rpc.netty.NettyRpcEnv.ask(NettyRpcEnv.scala:228)
+ at org.apache.spark.rpc.netty.NettyRpcEndpointRef.ask(NettyRpcEnv.scala:522)
+ at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:91)
+ ... 30 more
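Annotation: the 20:01 run is the most explicit about the mechanism. The driver's RPC ask to register the BlockManager fails with "Could not find BlockManagerMaster", meaning the driver-side RPC endpoints had already been torn down when the constructor tried to use them. For triage, the useful signal across this whole file is that every launch ends with the appclient's "All masters are unresponsive" line, which points at a dead or unreachable master rather than a bug in the application. A small, hypothetical helper that counts those signals per log file (it assumes the "program running" marker format used above, and is not part of this repository):

import scala.io.Source

// Hypothetical triage helper: count launches and master-unresponsive
// failures in a running-YYYY-MM.log of the format shown above.
object LogTriage {
  def main(args: Array[String]): Unit = {
    val src = Source.fromFile(args(0))
    try {
      val lines = src.getLines().toVector
      val launches   = lines.count(_.contains("program running -"))
      val masterDead = lines.count(_.contains("All masters are unresponsive"))
      // When the two counts track each other, fix the master before the job.
      println(s"launches=$launches masterUnresponsive=$masterDead")
    } finally src.close()
  }
}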
+IpLearningApplication program running - 2023-07-23 21:00:00
+[2023-07-23 21:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 21:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 22:00:00
+[2023-07-23 22:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 22:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpLearningApplication program running - 2023-07-23 23:00:00
+[2023-07-23 23:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 23:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22)
+ at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8)
+ at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpLearningApplication program running - 2023-07-24 00:00:00
+[2023-07-24 00:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-24 00:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-24 01:00:00 +[2023-07-24 01:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-24 01:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-24 02:00:00 +[2023-07-24 02:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-24 02:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-24 03:00:00 +[2023-07-24 03:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-24 03:01:02+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getRelationFqdnLocateIpDf(BaseClickhouseData.scala:114) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeRelationFqdnLocateIp(MergeDataFrame.scala:52) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipLearning$2.apply(UpdateDocument.scala:22) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipLearning(UpdateDocument.scala:22) + at cn.ac.iie.main.IpLearningApplication$.main(IpLearningApplication.scala:8) + at cn.ac.iie.main.IpLearningApplication.main(IpLearningApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpLearningApplication 程序运行 - 2023-07-24 04:00:00 +[2023-07-24 04:01:02+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
[... the identical "Error initializing SparkContext" IllegalArgumentException stack trace shown above follows this run, and the same sequence (run marker, "All masters are unresponsive! Giving up.", identical trace) repeats for every hourly run through the 2023-07-24 11:00:00 marker ...]
diff --git a/MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log b/MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log
new file mode 100644
index 0000000..905893a
--- /dev/null
+++ b/MPE/spark/iplearning/logs/IpRecommendApplication/running-2023-07.log
@@ -0,0 +1,9640 @@
+IpRecommendApplication run started - 2023-07-12 04:41:34
+[2023-07-12 04:42:36+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 04:42:36+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication run started - 2023-07-12 05:01:02
+[2023-07-12 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-12 05:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 06:01:02 +[2023-07-12 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 07:01:02 +[2023-07-12 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 08:01:02 +[2023-07-12 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 09:01:02 +[2023-07-12 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 10:01:02 +[2023-07-12 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 11:01:02 +[2023-07-12 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 12:01:02 +[2023-07-12 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 12:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 13:01:02 +[2023-07-12 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 14:01:02 +[2023-07-12 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 15:01:02 +[2023-07-12 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 16:01:03 +[2023-07-12 16:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-12 17:01:02 +[2023-07-12 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
[... the identical failure repeats at every hourly run from 2023-07-12 17:02 through 2023-07-13 15:02: "All masters are unresponsive! Giving up.", then "Error initializing SparkContext." with the same getServletHandlers stack trace. In the runs at 19:02 and 22:02 on 2023-07-12 and at 00:02, 05:02 and 08:02 on 2023-07-13 the secondary exception is instead java.lang.NullPointerException at org.apache.spark.SparkContext.<init>(SparkContext.scala:567), with an otherwise identical trace. 23 near-identical entries elided. ...]
+IpRecommendApplication started - 2023-07-13 16:01:02
+[2023-07-13 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+[2023-07-13 16:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+    at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+    [...remaining frames identical to the 13:02:05 trace, from SparkContext$.getOrCreate(SparkContext.scala:2516) downward...]
+IpRecommendApplication program started - 2023-07-13 17:01:03
+[2023-07-13 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[...the 17:02:05 entry repeats the MetricsSystem trace and the 18:02:04 entry repeats the BlockManagerMaster NullPointerException trace, both shown above, each preceded by the hourly startup line...]
+IpRecommendApplication program started - 2023-07-13 19:01:02
+[2023-07-13 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-13 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    [...remaining frames identical to the 13:02:05 trace, from SparkContext$.getOrCreate(SparkContext.scala:2516) downward...]
+IpRecommendApplication program started - 2023-07-13 20:01:02
+[2023-07-13 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-13 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-13 21:01:02 +[2023-07-13 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-13 22:01:02 +[2023-07-13 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-13 23:01:02 +[2023-07-13 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 00:01:02 +[2023-07-14 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 01:01:02 +[2023-07-14 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 02:01:02 +[2023-07-14 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 03:01:03 +[2023-07-14 03:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 04:01:02 +[2023-07-14 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 05:01:02 +[2023-07-14 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 06:01:02 +[2023-07-14 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 07:01:02 +[2023-07-14 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 08:01:02 +[2023-07-14 08:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 09:01:02 +[2023-07-14 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 09:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 10:01:02 +[2023-07-14 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 11:01:04 +[2023-07-14 11:02:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 11:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-14 12:01:02 +[2023-07-14 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-14 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 13:01:02
+[2023-07-14 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 14:01:02
+[2023-07-14 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 15:01:04
+[2023-07-14 15:02:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 15:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 16:01:03
+[2023-07-14 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 17:01:03
+[2023-07-14 17:02:06+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 17:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-14 18:01:02
+[2023-07-14 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 19:01:03
+[2023-07-14 19:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 20:01:02
+[2023-07-14 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 21:01:02
+[2023-07-14 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 22:01:02
+[2023-07-14 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 22:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-14 23:01:02
+[2023-07-14 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 00:01:02
+[2023-07-15 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+IpRecommendApplication program running - 2023-07-15 01:01:02
+[2023-07-15 01:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 01:02:06+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+IpRecommendApplication program running - 2023-07-15 02:01:02
+[2023-07-15 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+IpRecommendApplication program running - 2023-07-15 03:01:02
+[2023-07-15 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 04:01:02
+[2023-07-15 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+IpRecommendApplication program running - 2023-07-15 05:01:02
+[2023-07-15 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+IpRecommendApplication program running - 2023-07-15 06:01:02
+[2023-07-15 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 07:01:02
+[2023-07-15 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 08:01:02
+[2023-07-15 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 09:01:02
+[2023-07-15 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 10:01:02
+[2023-07-15 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-15 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-15 11:01:02
+[2023-07-15 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-15 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 12:01:02 +[2023-07-15 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 13:01:02 +[2023-07-15 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 14:01:03 +[2023-07-15 14:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 15:01:02 +[2023-07-15 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 16:01:02 +[2023-07-15 16:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 17:01:02 +[2023-07-15 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 18:01:02 +[2023-07-15 18:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 19:01:02 +[2023-07-15 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 20:01:02 +[2023-07-15 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 21:01:02 +[2023-07-15 21:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 22:01:02 +[2023-07-15 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-15 23:01:03 +[2023-07-15 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 00:01:02 +[2023-07-16 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 01:01:02 +[2023-07-16 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 02:01:02 +[2023-07-16 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 02:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 03:01:02 +[2023-07-16 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 04:01:02 +[2023-07-16 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 05:01:02 +[2023-07-16 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (same stack trace as the 04:02:05 IllegalArgumentException above)
+IpRecommendApplication 程序运行 - 2023-07-16 06:01:02
+[2023-07-16 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (same stack trace as the 04:02:05 IllegalArgumentException above)
+IpRecommendApplication 程序运行 - 2023-07-16 07:01:02
+[2023-07-16 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (same stack trace as the 04:02:05 IllegalArgumentException above)
+IpRecommendApplication 程序运行 - 2023-07-16 08:01:03
+[2023-07-16 08:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-16 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+	at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+	at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication 程序运行 - 2023-07-16 09:01:02
+[2023-07-16 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 10:01:02 +[2023-07-16 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 11:01:03 +[2023-07-16 11:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 12:01:02 +[2023-07-16 12:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 13:01:02 +[2023-07-16 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 14:01:02 +[2023-07-16 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 15:01:03 +[2023-07-16 15:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 16:01:02 +[2023-07-16 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 17:01:02 +[2023-07-16 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 18:01:02 +[2023-07-16 18:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 19:01:02 +[2023-07-16 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 20:01:02 +[2023-07-16 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 21:01:02 +[2023-07-16 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 22:01:02 +[2023-07-16 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 22:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-16 23:01:02 +[2023-07-16 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-16 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 00:01:03 +[2023-07-17 00:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 01:01:02 +[2023-07-17 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 02:01:02 +[2023-07-17 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 03:01:02 +[2023-07-17 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
[... hourly repeats from 2023-07-17 03:02 through 2023-07-17 10:02 elided: each run logs an "IpRecommendApplication program started" marker, then "Application has been killed. Reason: All masters are unresponsive! Giving up." and "Error initializing SparkContext" with one of the two stack traces shown above (java.lang.NullPointerException at SparkContext.scala:567 or java.lang.IllegalArgumentException: Can only call getServletHandlers on a running MetricsSystem) ...]
+IpRecommendApplication program started - 2023-07-17 11:01:02
+[2023-07-17 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-17 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program started - 2023-07-17 12:01:02
+[2023-07-17 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-17 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 13:01:02 +[2023-07-17 13:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 14:01:02 +[2023-07-17 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 15:01:02 +[2023-07-17 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 16:01:03 +[2023-07-17 16:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 17:01:02 +[2023-07-17 17:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 18:01:02 +[2023-07-17 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 19:01:02 +[2023-07-17 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 20:01:02 +[2023-07-17 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 21:01:02 +[2023-07-17 21:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 22:01:03 +[2023-07-17 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-17 23:01:02 +[2023-07-17 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-17 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 00:01:02
+[2023-07-18 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 01:01:02
+[2023-07-18 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 02:01:02
+[2023-07-18 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+    at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+    at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication started - 2023-07-18 03:01:02
+[2023-07-18 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 03:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 04:01:02
+[2023-07-18 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (same stack trace as the 2023-07-18 02:02 failure)
+IpRecommendApplication started - 2023-07-18 05:01:02
+[2023-07-18 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 06:01:02
+[2023-07-18 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 07:01:02
+[2023-07-18 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+    at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+    at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+    at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication started - 2023-07-18 08:01:02
+[2023-07-18 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 09:01:02
+[2023-07-18 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 09:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 10:01:02
+[2023-07-18 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (same stack trace as the 2023-07-18 02:02 failure)
+IpRecommendApplication started - 2023-07-18 11:01:02
+[2023-07-18 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (same stack trace as the 2023-07-18 02:02 failure)
+IpRecommendApplication started - 2023-07-18 12:01:02
+[2023-07-18 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (same stack trace as the 2023-07-18 02:02 failure)
+IpRecommendApplication started - 2023-07-18 13:01:02
+[2023-07-18 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (same stack trace as the 2023-07-17 22:02 failure)
+IpRecommendApplication started - 2023-07-18 14:01:02
+[2023-07-18 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-18 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (same stack trace as the 2023-07-18 07:02 failure)
+IpRecommendApplication started - 2023-07-18 15:01:02
+[2023-07-18 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-18 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-18 16:01:02 +[2023-07-18 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-18 17:01:02 +[2023-07-18 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 17:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-18 18:01:02 +[2023-07-18 18:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-18 19:01:02 +[2023-07-18 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-18 20:01:02 +[2023-07-18 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-18 21:01:02 +[2023-07-18 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
+[2023-07-18 22:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+	at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+	at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-18 23:01:02
+[2023-07-18 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-19 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-19 21:01:02 +[2023-07-19 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-19 22:01:02 +[2023-07-19 22:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-19 23:01:02 +[2023-07-19 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 00:01:02 +[2023-07-20 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 01:01:02 +[2023-07-20 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 02:01:02 +[2023-07-20 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 03:01:02 +[2023-07-20 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 03:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 04:01:02 +[2023-07-20 04:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 04:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 05:01:02 +[2023-07-20 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 06:01:02 +[2023-07-20 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 07:01:02 +[2023-07-20 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 08:01:02 +[2023-07-20 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 09:01:02 +[2023-07-20 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 10:01:02 +[2023-07-20 10:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 10:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 11:01:02 +[2023-07-20 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 12:01:02 +[2023-07-20 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 12:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 13:01:02 +[2023-07-20 13:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 14:01:02 +[2023-07-20 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-20 15:01:02 +[2023-07-20 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
+[2023-07-20 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+	at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+	at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program started - 2023-07-20 16:01:02
+[2023-07-20 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
[... hourly retries from 2023-07-20 16:02 through 2023-07-21 00:02 failed the same way: the MetricsSystem IllegalArgumentException in most hours, the SparkContext.scala:567 NullPointerException at 20:02 and 21:02; duplicate stack traces omitted ...]
+IpRecommendApplication program started - 2023-07-21 01:01:02
+[2023-07-21 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-21 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+	at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+	at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program started - 2023-07-21 02:01:03
+[2023-07-21 02:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
[... hourly retries from 2023-07-21 02:02 through 13:02 failed the same way: the MetricsSystem IllegalArgumentException in most hours, the SparkContext.scala:567 NullPointerException at 11:02; duplicate stack traces omitted ...]
+IpRecommendApplication program started - 2023-07-21 14:01:02
+[2023-07-21 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-21 14:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 15:01:02 +[2023-07-21 15:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 15:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 16:01:02 +[2023-07-21 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 16:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 17:01:02 +[2023-07-21 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 18:01:02 +[2023-07-21 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 19:01:02 +[2023-07-21 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 19:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 20:01:02 +[2023-07-21 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 21:01:02 +[2023-07-21 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 22:01:02 +[2023-07-21 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-21 23:01:02 +[2023-07-21 23:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 23:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 00:01:02 +[2023-07-22 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 01:01:02 +[2023-07-22 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 02:01:02 +[2023-07-22 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 03:01:03 +[2023-07-22 03:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 04:01:02 +[2023-07-22 04:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 05:01:02 +[2023-07-22 05:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 06:01:02 +[2023-07-22 06:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 06:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 07:01:02 +[2023-07-22 07:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 07:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 08:01:02 +[2023-07-22 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-22 08:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 09:01:03
+[2023-07-22 09:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 10:01:02
+[2023-07-22 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 11:01:03
+[2023-07-22 11:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 12:01:02
+[2023-07-22 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 13:01:02
+[2023-07-22 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 13:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 14:01:02
+[2023-07-22 14:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 15:01:02
+[2023-07-22 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 16:01:02
+[2023-07-22 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 16:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+IpRecommendApplication program running - 2023-07-22 17:01:02
+[2023-07-22 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 17:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+ at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program running - 2023-07-22 18:01:02
+[2023-07-22 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-22 18:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 19:01:02 +[2023-07-22 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 19:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 20:01:02 +[2023-07-22 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 20:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 21:01:02 +[2023-07-22 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 21:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 22:01:02 +[2023-07-22 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-22 23:01:03 +[2023-07-22 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 00:01:02 +[2023-07-23 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 00:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 01:01:03 +[2023-07-23 01:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 01:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 02:01:03 +[2023-07-23 02:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 02:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 03:01:02 +[2023-07-23 03:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 03:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 04:01:02 +[2023-07-23 04:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 04:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 05:01:02 +[2023-07-23 05:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
+[2023-07-23 05:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (stack trace identical to the 2023-07-23 04:02:05 IllegalArgumentException above)
+IpRecommendApplication program started - 2023-07-23 06:01:03
+[2023-07-23 06:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 06:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (stack trace identical to the 2023-07-23 04:02:05 IllegalArgumentException above)
+IpRecommendApplication program started - 2023-07-23 07:01:02
+[2023-07-23 07:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-23 07:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+	at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+	at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program started - 2023-07-23 08:01:02
+[2023-07-23 08:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-23 08:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (stack trace identical to the 2023-07-23 04:02:05 IllegalArgumentException above)
+IpRecommendApplication program started - 2023-07-23 09:01:02
+[2023-07-23 09:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 09:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	... (stack trace identical to the 2023-07-23 07:02:05 NullPointerException above)
+IpRecommendApplication program started - 2023-07-23 10:01:02
+[2023-07-23 10:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-23 10:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	... (stack trace identical to the 2023-07-23 04:02:05 IllegalArgumentException above)
+IpRecommendApplication program started - 2023-07-23 11:01:02
+[2023-07-23 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-23 11:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+	at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+	at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+IpRecommendApplication program started - 2023-07-23 12:01:02
+[2023-07-23 12:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-23 12:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 13:01:02 +[2023-07-23 13:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 13:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 14:01:02 +[2023-07-23 14:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 14:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 15:01:02 +[2023-07-23 15:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 15:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 16:01:02 +[2023-07-23 16:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 16:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 17:01:02 +[2023-07-23 17:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 17:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 18:01:02 +[2023-07-23 18:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 18:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 19:01:02 +[2023-07-23 19:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 19:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 20:01:02 +[2023-07-23 20:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 20:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 21:01:02 +[2023-07-23 21:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 21:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 22:01:02 +[2023-07-23 22:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 22:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-23 23:01:02 +[2023-07-23 23:02:05+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 23:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-24 00:01:02 +[2023-07-24 00:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-24 00:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-24 01:01:02 +[2023-07-24 01:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-24 01:02:04+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46) + at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8) + at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +IpRecommendApplication 程序运行 - 2023-07-24 02:01:02 +[2023-07-24 02:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
[... entries from 2023-07-24 02:02:04 through 10:02:04 omitted: the hourly launch/failure cycle repeats nine more times, eight runs failing with java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem (at MetricsSystem.scala:91) and one (09:02:05) with java.lang.NullPointerException at org.apache.spark.SparkContext.<init>(SparkContext.scala:567); every cycle is preceded by <Application has been killed. Reason: All masters are unresponsive! Giving up.> ...]
+IpRecommendApplication program started - 2023-07-24 11:01:02
+[2023-07-24 11:02:04+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-24 11:02:05+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexIpDf(BaseClickhouseData.scala:91)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexIp(MergeDataFrame.scala:31)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$ipRecommend$2.apply(UpdateDocument.scala:46)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.ipRecommend(UpdateDocument.scala:46)
+ at cn.ac.iie.main.IpRecommendApplication$.main(IpRecommendApplication.scala:8)
+ at cn.ac.iie.main.IpRecommendApplication.main(IpRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
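Every failure recorded in these logs has the same shape: spark-submit retries the standalone master for about a minute, the client gives up with <All masters are unresponsive! Giving up.>, and the half-initialized SparkContext then dies with a follow-on IllegalArgumentException or NullPointerException from the aborted startup. A minimal pre-flight probe, sketched below in bash, would let a cron launcher skip a doomed submit and log one clear line instead of a full stack trace. The script name and its host/port arguments are illustrative assumptions, not part of the repository's iplearning.sh:

#!/bin/bash
# check_master.sh - hypothetical pre-flight probe, not part of the shipped scripts.
# Verifies that the standalone master's RPC port accepts TCP connections
# before spark-submit is attempted.
set -u

master_host="${1:?usage: $0 <master-host> [master-port]}"
master_port="${2:-7077}"   # 7077 is the standalone master's default RPC port

# bash resolves /dev/tcp/<host>/<port> itself, so the redirect succeeds only
# if a TCP connection is accepted; `timeout` bounds the attempt to 5 seconds.
if timeout 5 bash -c "exec 3<>/dev/tcp/${master_host}/${master_port}" 2>/dev/null; then
    echo "spark master ${master_host}:${master_port} is reachable"
else
    echo "spark master ${master_host}:${master_port} is unreachable; skipping spark-submit" >&2
    exit 1
fi

Calling such a probe at the top of the launcher, and submitting only on exit status 0, would reduce each failed hour to a single log line while the master is down.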
diff --git a/MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log b/MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log
new file mode 100644
index 0000000..05eddf4
--- /dev/null
+++ b/MPE/spark/iplearning/logs/SubscriberRecommendApplication/running-2023-07.log
@@ -0,0 +1,9673 @@
+SubscriberRecommendApplication program started - 2023-07-12 04:42:37
+[2023-07-12 04:43:39+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-12 04:43:39+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program started - 2023-07-12 05:02:05
+[2023-07-12 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-12 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 06:02:05 +[2023-07-12 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 07:02:05 +[2023-07-12 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 08:02:05 +[2023-07-12 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 09:02:05 +[2023-07-12 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 10:02:05 +[2023-07-12 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 11:02:05 +[2023-07-12 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 12:02:05 +[2023-07-12 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 13:02:05 +[2023-07-12 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 14:02:05 +[2023-07-12 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-12 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[... the same IllegalArgumentException "Error initializing SparkContext" stack trace as above repeats here, and the full hourly cycle ("SubscriberRecommendApplication program started" marker at HH:02:05, "All masters are unresponsive! Giving up." and the identical trace at HH:03) recurs through 20:03 on 2023-07-12 ...]
+SubscriberRecommendApplication program started - 2023-07-12 21:02:05
+[2023-07-12 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-12 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program started - 2023-07-12 22:02:05
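Both trace variants above share one root cause: the standalone master configured in application.properties (master=spark://192.168.20.223:7077) never answers the driver's registration attempts, so SparkContext initialization aborts and the hourly resubmission from iplearning.sh lands in the same outage. The sketch below is illustrative only and not part of ip-learning-spark.jar: a hypothetical pre-flight check (PreflightCheck and masterReachable are invented names) that probes the master's RPC port before calling SparkSession.builder, so an outage fails fast with an explicit message instead of the MetricsSystem / NullPointerException traces.

import java.net.{InetSocketAddress, Socket}
import org.apache.spark.sql.SparkSession

object PreflightCheck {
  // True if a TCP connection to host:port succeeds within timeoutMs.
  def masterReachable(host: String, port: Int, timeoutMs: Int = 3000): Boolean = {
    val socket = new Socket()
    try {
      socket.connect(new InetSocketAddress(host, port), timeoutMs)
      true
    } catch {
      case _: java.io.IOException => false
    } finally {
      socket.close()
    }
  }

  def main(args: Array[String]): Unit = {
    val (host, port) = ("192.168.20.223", 7077) // from application.properties
    if (!masterReachable(host, port)) {
      System.err.println(s"Spark master $host:$port unreachable; skipping this run.")
      sys.exit(1)
    }
    val spark = SparkSession.builder()
      .master(s"spark://$host:$port")
      .appName("preflight-check")
      .getOrCreate()
    spark.stop() // a real job would run its workload before stopping
  }
}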
Giving up.> +[2023-07-12 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-12 23:02:05 +[2023-07-12 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-12 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 00:02:05 +[2023-07-13 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 01:02:05 +[2023-07-13 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 02:02:05 +[2023-07-13 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 03:02:05 +[2023-07-13 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 04:02:05 +[2023-07-13 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 05:02:05 +[2023-07-13 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 06:02:05 +[2023-07-13 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 07:02:05 +[2023-07-13 07:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 08:02:05 +[2023-07-13 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 09:02:05 +[2023-07-13 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 10:02:05 +[2023-07-13 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-13 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-13 11:02:05 +[2023-07-13 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[Identical cycle repeats hourly: each relaunch at HH:02 is followed at HH:03 by "All masters are unresponsive! Giving up." and the same stack trace, from the 2023-07-13 11:03 failure through the 2023-07-14 06:02 relaunch; 19 duplicate traces omitted.]
+[2023-07-14 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-14 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	... (remaining frames identical to the IllegalArgumentException trace above)
+SubscriberRecommendApplication started - 2023-07-14 07:02:05
+[2023-07-14 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
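The 06:03 run is the one variant in this window: the aborted construction surfaces as a NullPointerException instead of the MetricsSystem requirement failure, but the trigger, an unreachable master, is the same. The cycles also show the job being resubmitted every hour at HH:02 and hitting the dead master a minute later. A hypothetical pre-flight check, not present in this repository, could make each cycle fail fast with a direct message; the host and port arguments would come from the job's configuration:

import java.net.{InetSocketAddress, Socket}
import scala.util.Try

// Hypothetical helper (not in the shipped jar): verify the standalone master's
// RPC port answers before attempting SparkContext startup, so an unreachable
// master is reported directly rather than via the errors logged above.
object MasterPreflight {
  def reachable(host: String, port: Int, timeoutMs: Int = 3000): Boolean =
    Try {
      val socket = new Socket()
      try socket.connect(new InetSocketAddress(host, port), timeoutMs)
      finally socket.close()
    }.isSuccess

  // Usage sketch: MasterPreflight <host> <port>, run before spark-submit.
  def main(args: Array[String]): Unit =
    if (!reachable(args(0), args(1).toInt))
      sys.error(s"Spark master ${args(0)}:${args(1)} is unreachable; skipping spark-submit")
}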
+[The cycle continues unchanged: the 07:03 failure and the 2023-07-14 08:02 and 09:02 relaunches each end in the identical "Can only call getServletHandlers on a running MetricsSystem" stack trace; 3 duplicate traces omitted.]
+SubscriberRecommendApplication started - 2023-07-14 10:02:05
+[2023-07-14 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-14 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 11:02:06 +[2023-07-14 11:03:09+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 11:03:09+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 12:02:05 +[2023-07-14 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 13:02:05 +[2023-07-14 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 14:02:05 +[2023-07-14 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 15:02:06 +[2023-07-14 15:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 15:03:09+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 16:02:05 +[2023-07-14 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 17:02:07 +[2023-07-14 17:03:09+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 17:03:10+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 18:02:05 +[2023-07-14 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 19:02:06 +[2023-07-14 19:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 20:02:05 +[2023-07-14 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 21:02:05 +[2023-07-14 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 22:02:05 +[2023-07-14 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-14 23:02:05 +[2023-07-14 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-14 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 00:02:05 +[2023-07-15 00:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +org.apache.spark.SparkException: Exception thrown in awaitResult: + at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:205) + at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75) + at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92) + at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76) + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +Caused by: org.apache.spark.SparkException: Could not find BlockManagerMaster. + at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:157) + at org.apache.spark.rpc.netty.Dispatcher.postLocalMessage(Dispatcher.scala:132) + at org.apache.spark.rpc.netty.NettyRpcEnv.ask(NettyRpcEnv.scala:228) + at org.apache.spark.rpc.netty.NettyRpcEndpointRef.ask(NettyRpcEnv.scala:522) + at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:91) + ... 
30 more +SubscriberRecommendApplication 程序运行 - 2023-07-15 01:02:06 +[2023-07-15 01:03:10+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.> +[2023-07-15 01:03:10+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 02:02:05 +[2023-07-15 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 03:02:05 +[2023-07-15 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 04:02:05 +[2023-07-15 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 05:02:05 +[2023-07-15 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 06:02:05 +[2023-07-15 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 07:02:05 +[2023-07-15 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 08:02:05 +[2023-07-15 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 09:02:05 +[2023-07-15 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 10:02:05 +[2023-07-15 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 11:02:05 +[2023-07-15 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 12:02:05 +[2023-07-15 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 13:02:05 +[2023-07-15 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 14:02:05 +[2023-07-15 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 15:02:05 +[2023-07-15 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 16:02:05 +[2023-07-15 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 17:02:05 +[2023-07-15 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 18:02:05 +[2023-07-15 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 19:02:05 +[2023-07-15 19:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 20:02:05 +[2023-07-15 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 21:02:05 +[2023-07-15 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 22:02:05 +[2023-07-15 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-15 23:02:06 +[2023-07-15 23:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-15 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-16 00:02:05 +[2023-07-16 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
+[2023-07-16 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 01:02:05
+[2023-07-16 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+    at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+    at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program started - 2023-07-16 02:02:05
+[2023-07-16 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 03:02:05
+[2023-07-16 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 04:02:05
+[2023-07-16 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 05:02:05
+[2023-07-16 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 06:02:05
+[2023-07-16 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 07:02:05
+[2023-07-16 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 08:02:06
+[2023-07-16 08:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 08:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 09:02:05
+[2023-07-16 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 10:02:05
+[2023-07-16 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 10:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 11:02:06
+[2023-07-16 11:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 12:02:05
+[2023-07-16 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 13:02:05
+[2023-07-16 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 14:02:05
+[2023-07-16 14:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 14:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-15 23:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 15:02:05
+[2023-07-16 15:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 16:02:05
+[2023-07-16 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 17:02:05
+[2023-07-16 17:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 18:02:05
+[2023-07-16 18:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 19:02:05
+[2023-07-16 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 20:02:05
+[2023-07-16 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    ... (stack trace identical to the 2023-07-16 01:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 21:02:05
+[2023-07-16 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 22:02:05
+[2023-07-16 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-16 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    ... (stack trace identical to the 2023-07-15 22:03:08 entry above)
+SubscriberRecommendApplication program started - 2023-07-16 23:02:05
+[2023-07-16 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-16 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 00:02:05 +[2023-07-17 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 01:02:05 +[2023-07-17 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 02:02:05 +[2023-07-17 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 03:02:05 +[2023-07-17 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 04:02:05 +[2023-07-17 04:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 05:02:05 +[2023-07-17 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 06:02:05 +[2023-07-17 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 07:02:05 +[2023-07-17 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 08:02:06 +[2023-07-17 08:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 08:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 09:02:05 +[2023-07-17 09:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 10:02:05 +[2023-07-17 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 11:02:05 +[2023-07-17 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 12:02:05 +[2023-07-17 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 12:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 13:02:06 +[2023-07-17 13:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 14:02:05 +[2023-07-17 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 14:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 15:02:05 +[2023-07-17 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-17 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-17 16:02:05 +[2023-07-17 16:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.>
+[2023-07-17 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-17 22:02:05
+[2023-07-17 22:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-18 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-18 05:02:05
+[2023-07-18 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-18 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 06:02:05 +[2023-07-18 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 07:02:05 +[2023-07-18 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 08:02:05 +[2023-07-18 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 09:02:05 +[2023-07-18 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 10:02:05 +[2023-07-18 10:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 10:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 11:02:05 +[2023-07-18 11:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 12:02:05 +[2023-07-18 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 13:02:05 +[2023-07-18 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 14:02:05 +[2023-07-18 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+SubscriberRecommendApplication program running - 2023-07-18 14:02:05
+[2023-07-18 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.>
+[2023-07-18 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	... (remaining frames identical to the first trace above, from org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) down)
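Every trace in this log passes through cn.ac.iie.utils.SparkSessionUtil$.<clinit>: the SparkSession is built while the utility object is being statically initialized, so whenever the standalone master is unreachable the failure surfaces as an unhandled NullPointerException or IllegalArgumentException during SparkContext startup, once per hourly run. A minimal sketch of what that utility presumably looks like, reconstructed only from the stack frames (the master URL and app name below are illustrative placeholders, not the deployed values):

import org.apache.spark.sql.SparkSession

// Hypothetical reconstruction of cn.ac.iie.utils.SparkSessionUtil, inferred
// from the frames SparkSessionUtil$.<clinit> -> getSparkSession ->
// SparkSession$Builder.getOrCreate seen in the traces above.
object SparkSessionUtil {
  // getOrCreate executes inside the object's static initializer, so a dead
  // standalone master aborts SparkContext startup before main() can react.
  private val spark: SparkSession = SparkSession.builder()
    .master("spark://master-host:7077")          // illustrative master URL
    .appName("SubscriberRecommendApplication")   // illustrative app name
    .getOrCreate()

  def getSparkSession: SparkSession = spark
}

Declaring the field as a lazy val, or wrapping getOrCreate in scala.util.Try at the call site, would let each run detect the unreachable master and exit with a clear message instead of failing inside <clinit>.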
Giving up.> +[2023-07-18 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 16:02:05 +[2023-07-18 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 17:02:05 +[2023-07-18 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 17:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 18:02:05 +[2023-07-18 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 18:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 19:02:05 +[2023-07-18 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 20:02:05 +[2023-07-18 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 21:02:05 +[2023-07-18 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 22:02:05 +[2023-07-18 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-18 23:02:05 +[2023-07-18 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-18 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 00:02:05 +[2023-07-19 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 01:02:05 +[2023-07-19 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 02:02:05 +[2023-07-19 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 02:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 03:02:05 +[2023-07-19 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 04:02:05 +[2023-07-19 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 05:02:05 +[2023-07-19 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 06:02:05 +[2023-07-19 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 07:02:06 +[2023-07-19 07:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 08:02:05 +[2023-07-19 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 09:02:05 +[2023-07-19 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-19 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-19 10:02:07 +[2023-07-19 10:03:10+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-19 10:03:10+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+    at scala.Predef$.require(Predef.scala:224)
+    at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+    at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+    at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program started - 2023-07-19 11:02:05
+[2023-07-19 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 12:02:05
+[2023-07-19 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 13:02:05
+[2023-07-19 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 14:02:05
+[2023-07-19 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 15:02:05
+[2023-07-19 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+    at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+    at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+    at scala.Option.getOrElse(Option.scala:121)
+    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+    at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+    at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+    at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+    at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+    at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+    at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+    at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+    at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+    at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+    at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+    at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+    at java.lang.reflect.Method.invoke(Method.java:498)
+    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program started - 2023-07-19 16:02:05
+[2023-07-19 16:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 16:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+SubscriberRecommendApplication program started - 2023-07-19 17:02:05
+[2023-07-19 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 17:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 18:02:05
+[2023-07-19 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+SubscriberRecommendApplication program started - 2023-07-19 19:02:05
+[2023-07-19 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 20:02:05
+[2023-07-19 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 21:02:05
+[2023-07-19 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 22:02:05
+[2023-07-19 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-19 23:02:05
+[2023-07-19 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-19 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 00:02:05
+[2023-07-20 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 01:02:05
+[2023-07-20 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+SubscriberRecommendApplication program started - 2023-07-20 02:02:05
+[2023-07-20 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+SubscriberRecommendApplication program started - 2023-07-20 03:02:05
+[2023-07-20 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 04:02:05
+[2023-07-20 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 05:02:05
+[2023-07-20 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+SubscriberRecommendApplication program started - 2023-07-20 06:02:05
+[2023-07-20 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 07:02:05
+[2023-07-20 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 08:02:05
+[2023-07-20 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-20 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+SubscriberRecommendApplication program started - 2023-07-20 09:02:05
+[2023-07-20 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-20 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 10:02:05 +[2023-07-20 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 10:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 11:02:05 +[2023-07-20 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 12:02:05 +[2023-07-20 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 13:02:05 +[2023-07-20 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 14:02:05 +[2023-07-20 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 15:02:05 +[2023-07-20 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 16:02:05 +[2023-07-20 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 17:02:05 +[2023-07-20 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 18:02:05 +[2023-07-20 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 19:02:05 +[2023-07-20 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 20:02:05 +[2023-07-20 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 20:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 21:02:05 +[2023-07-20 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 22:02:05 +[2023-07-20 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-20 23:02:05 +[2023-07-20 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-20 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-21 00:02:05 +[2023-07-21 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 00:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-21 01:02:05 +[2023-07-21 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-21 02:02:05 +[2023-07-21 02:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-21 03:02:05 +[2023-07-21 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-21 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-21 04:02:05 +[2023-07-21 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+Giving up.>
+[2023-07-21 04:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 05:02:05
+[2023-07-21 05:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 05:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 06:02:05
+[2023-07-21 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64)
+	at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:518)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 07:02:05
+[2023-07-21 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 08:02:05
+[2023-07-21 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 09:02:05
+[2023-07-21 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 09:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 10:02:05
+[2023-07-21 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 11:02:05
+[2023-07-21 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 12:02:05
+[2023-07-21 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 13:02:05
+[2023-07-21 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 14:02:05
+[2023-07-21 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 15:02:05
+[2023-07-21 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 15:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 16:02:05
+[2023-07-21 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 17:02:05
+[2023-07-21 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 17:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 18:02:05
+[2023-07-21 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 19:02:05
+[2023-07-21 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 19:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 20:02:05
+[2023-07-21 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 21:02:05
+[2023-07-21 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 21:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 22:02:05
+[2023-07-21 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 22:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-21 23:02:05
+[2023-07-21 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
+Giving up.>
+[2023-07-21 23:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+	at scala.Predef$.require(Predef.scala:224)
+	at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+	at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+	at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+	at scala.Option.getOrElse(Option.scala:121)
+	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+	at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+	at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+	at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+	at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+	at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+	at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+	at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+	at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+	at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+	at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:498)
+	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 00:02:05
+[2023-07-22 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive!
Giving up.> +[2023-07-22 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 01:02:05 +[2023-07-22 01:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 01:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 02:02:05 +[2023-07-22 02:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 03:02:05 +[2023-07-22 03:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 03:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 04:02:06 +[2023-07-22 04:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 05:02:05 +[2023-07-22 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 06:02:05 +[2023-07-22 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 06:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 07:02:05 +[2023-07-22 07:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 07:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 08:02:05 +[2023-07-22 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 09:02:05 +[2023-07-22 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
+[2023-07-22 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 10:02:05
+[2023-07-22 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 11:02:06
+[2023-07-22 11:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 11:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 12:02:05
+[2023-07-22 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 13:02:05
+[2023-07-22 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 13:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 14:02:05
+[2023-07-22 14:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 14:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 15:02:05
+[2023-07-22 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 16:02:05
+[2023-07-22 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 16:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 17:02:05
+[2023-07-22 17:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 17:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 18:02:05
+[2023-07-22 18:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 18:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem
+ at scala.Predef$.require(Predef.scala:224)
+ at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91)
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:524)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 19:02:05
+[2023-07-22 19:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
+[2023-07-22 19:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.>
+java.lang.NullPointerException
+ at org.apache.spark.SparkContext.<init>(SparkContext.scala:567)
+ at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918)
+ at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910)
+ at scala.Option.getOrElse(Option.scala:121)
+ at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910)
+ at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30)
+ at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12)
+ at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala)
+ at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19)
+ at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162)
+ at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33)
+ at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61)
+ at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33)
+ at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8)
+ at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
+ at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
+ at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
+ at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
+ at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
+SubscriberRecommendApplication program running - 2023-07-22 20:02:05
+[2023-07-22 20:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! Giving up.>
Giving up.> +[2023-07-22 20:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 21:02:05 +[2023-07-22 21:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 21:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 22:02:05 +[2023-07-22 22:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 22:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-22 23:02:05 +[2023-07-22 23:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-22 23:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 00:02:05 +[2023-07-23 00:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 00:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 01:02:06 +[2023-07-23 01:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
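All three stack-trace signatures in this log share one proximate cause: the driver never manages to register with the standalone master ("All masters are unresponsive! Giving up."), after which SparkContext construction fails partway through. The secondary exception appears to depend only on how far the constructor got: the requirement failure fires because the MetricsSystem was never started, and the NullPointerExceptions are consistent with scheduler and block-manager state still being null when initialization aborts. Every trace enters through cn.ac.iie.utils.SparkSessionUtil.getSparkSession; that class is not included in this diff, so the following is only a minimal sketch of what such a helper typically looks like (the object name, parameters, and comments are assumptions, not the project's code):

    // Hypothetical sketch - not the actual SparkSessionUtil from this repository.
    import org.apache.spark.sql.SparkSession

    object SparkSessionUtilSketch {
      // If the standalone master is unreachable, the registration retry thread logs
      // "All masters are unresponsive! Giving up." and getOrCreate() throws while the
      // SparkContext is still half-built, producing traces like the ones in this log.
      def getSparkSession(master: String, appName: String): SparkSession =
        SparkSession.builder()
          .master(master)      // standalone master URL, e.g. spark://<host>:7077
          .appName(appName)
          .getOrCreate()
    }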
Giving up.> +[2023-07-23 01:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 02:02:05 +[2023-07-23 02:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 02:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 03:02:05 +[2023-07-23 03:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 03:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 04:02:05 +[2023-07-23 04:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 04:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 05:02:05 +[2023-07-23 05:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 05:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 06:02:05 +[2023-07-23 06:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 06:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 07:02:05 +[2023-07-23 07:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 07:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:64) + at org.apache.spark.storage.BlockManager.initialize(BlockManager.scala:227) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:518) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 08:02:05 +[2023-07-23 08:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 08:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 09:02:05 +[2023-07-23 09:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 09:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 10:02:05 +[2023-07-23 10:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 10:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 11:02:05 +[2023-07-23 11:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 11:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 12:02:05 +[2023-07-23 12:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 12:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 13:02:05 +[2023-07-23 13:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 13:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 14:02:06 +[2023-07-23 14:03:08+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 14:03:08+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.IllegalArgumentException: requirement failed: Can only call getServletHandlers on a running MetricsSystem + at scala.Predef$.require(Predef.scala:224) + at org.apache.spark.metrics.MetricsSystem.getServletHandlers(MetricsSystem.scala:91) + at org.apache.spark.SparkContext.<init>(SparkContext.scala:524) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 15:02:05 +[2023-07-23 15:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
Giving up.> +[2023-07-23 15:03:07+0000] [ERROR] [Thread:main] org.apache.spark.internal.Logging$class.logError(Logging.scala:91) - <Error initializing SparkContext.> +java.lang.NullPointerException + at org.apache.spark.SparkContext.<init>(SparkContext.scala:567) + at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2516) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:918) + at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:910) + at scala.Option.getOrElse(Option.scala:121) + at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:910) + at cn.ac.iie.utils.SparkSessionUtil$.getSparkSession(SparkSessionUtil.scala:30) + at cn.ac.iie.utils.SparkSessionUtil$.<init>(SparkSessionUtil.scala:12) + at cn.ac.iie.utils.SparkSessionUtil$.<clinit>(SparkSessionUtil.scala) + at cn.ac.iie.dao.BaseClickhouseData$.initClickhouseData(BaseClickhouseData.scala:19) + at cn.ac.iie.dao.BaseClickhouseData$.getVertexSubidDf(BaseClickhouseData.scala:162) + at cn.ac.iie.service.transform.MergeDataFrame$.mergeVertexSubid(MergeDataFrame.scala:90) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$$anonfun$subscriberRecommend$2.apply(UpdateDocument.scala:33) + at cn.ac.iie.service.update.UpdateDocument$.updateDocument(UpdateDocument.scala:61) + at cn.ac.iie.service.update.UpdateDocument$.subscriberRecommend(UpdateDocument.scala:33) + at cn.ac.iie.main.SubscriberRecommendApplication$.main(SubscriberRecommendApplication.scala:8) + at cn.ac.iie.main.SubscriberRecommendApplication.main(SubscriberRecommendApplication.scala) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:498) + at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775) + at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) + at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) + at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119) + at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) +SubscriberRecommendApplication 程序运行 - 2023-07-23 16:02:05 +[2023-07-23 16:03:07+0000] [ERROR] [Thread:appclient-registration-retry-thread] org.apache.spark.internal.Logging$class.logError(Logging.scala:70) - <Application has been killed. Reason: All masters are unresponsive! 
[... the same hourly cycle repeats near-verbatim from 2023-07-23 16:02:05 through 2023-07-24 11:03:07: a "SubscriberRecommendApplication 程序运行" ("program started") banner at HH:02, then at HH:03 "Application has been killed. Reason: All masters are unresponsive! Giving up." followed by "Error initializing SparkContext" with one of the two stack traces shown above (usually the IllegalArgumentException, occasionally the NullPointerException) ...]
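Note on the failure pattern above: the appclient-registration-retry-thread error is the primary fault (the driver never registers with the standalone master), and the SparkContext exceptions that follow (the MetricsSystem IllegalArgumentException, occasionally a NullPointerException) are secondary symptoms of tearing down a half-initialized context. A launcher in the style of iplearning.sh could fail fast with a clearer message instead of submitting into an unreachable cluster; the sketch below is illustrative only, with MASTER_HOST/MASTER_PORT as placeholders to be filled from the configured master URL, and it assumes nc(1) is installed on the host.

#!/bin/bash
# Sketch only: probe the standalone master's RPC port before calling spark-submit.
# MASTER_HOST and MASTER_PORT are placeholders; take them from the master= setting
# (7077 is the standalone-mode default port).
MASTER_HOST="<spark-master-host>"
MASTER_PORT=7077

if ! nc -z -w 5 "$MASTER_HOST" "$MASTER_PORT"; then
    echo "spark master $MASTER_HOST:$MASTER_PORT unreachable - not submitting" >&2
    exit 1
fi
# ...continue with the spark-submit commands as in iplearning.sh...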
diff --git a/MPE/spark/iplearning/runnum b/MPE/spark/iplearning/runnum
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/MPE/spark/iplearning/runnum
@@ -0,0 +1 @@
+1
diff --git a/MPE/spark/iplearning/upconfig.sh b/MPE/spark/iplearning/upconfig.sh
new file mode 100644
index 0000000..1527ffb
--- /dev/null
+++ b/MPE/spark/iplearning/upconfig.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+source /etc/profile
+
+jar_name="ip-learning-spark.jar"
+
+jar -uvf $jar_name application.properties
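Note on upconfig.sh above: jar -uvf replaces the named entry inside an existing archive, so the script patches the edited application.properties into ip-learning-spark.jar in place, and the new settings take effect on the next submit without rebuilding the jar. A typical usage pass, sketched with the file names from this directory and assuming unzip(1) is available for the verification step:

# edit the config, patch it into the jar, then confirm the entry was refreshed
vi application.properties
./upconfig.sh
unzip -p ip-learning-spark.jar application.properties | head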
