diff options
| author | gujinkai <[email protected]> | 2023-11-21 18:37:34 +0800 |
|---|---|---|
| committer | gujinkai <[email protected]> | 2023-11-22 10:34:47 +0800 |
| commit | 70ccd954bff5d87e9f1ee5e8f8993b60b693519e (patch) | |
| tree | 324234965b3d94f669b4f601ca2e06f27be2ebd8 | |
| parent | f632ee385786054ab4796311e16d12b1de3e6433 (diff) | |
chore: remove unused declared dependencies
4 files changed, 45 insertions, 264 deletions
diff --git a/platform-base/src/main/java/com/zdjizhi/base/utils/FlinkEnvironmentUtils.java b/platform-base/src/main/java/com/zdjizhi/base/utils/FlinkEnvironmentUtils.java index 8490615..48e3fe8 100644 --- a/platform-base/src/main/java/com/zdjizhi/base/utils/FlinkEnvironmentUtils.java +++ b/platform-base/src/main/java/com/zdjizhi/base/utils/FlinkEnvironmentUtils.java @@ -4,12 +4,9 @@ import com.zdjizhi.base.common.CommonConfig; import com.zdjizhi.base.config.Configs; import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner; import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.RestOptions; import org.apache.flink.streaming.api.CheckpointingMode; import org.apache.flink.streaming.api.environment.CheckpointConfig; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import java.time.Duration; @@ -19,7 +16,6 @@ import java.time.Duration; */ public class FlinkEnvironmentUtils { public static StreamExecutionEnvironment streamExeEnv = StreamExecutionEnvironment.getExecutionEnvironment(); - public static StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(streamExeEnv); static { streamExeEnv.setParallelism(Configs.get(CommonConfig.STREAM_EXECUTION_ENVIRONMENT_PARALLELISM)); @@ -51,14 +47,4 @@ public class FlinkEnvironmentUtils { .<T>forBoundedOutOfOrderness(maxOutOfOrderness) .withTimestampAssigner(timestampAssigner); } - - /* - public static StreamTableEnvironment getTableEnv() { - if (tableEnvironment == null) { - tableEnvironment = StreamTableEnvironment.create(streamExeEnv); - } - return tableEnvironment; - } - */ - } diff --git a/platform-base/src/main/java/com/zdjizhi/base/utils/RetryRequestFailureHandler.java b/platform-base/src/main/java/com/zdjizhi/base/utils/RetryRequestFailureHandler.java deleted file mode 100644 index 
fdbf9ee..0000000 --- a/platform-base/src/main/java/com/zdjizhi/base/utils/RetryRequestFailureHandler.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.zdjizhi.base.utils; - -import org.apache.flink.streaming.connectors.elasticsearch.ActionRequestFailureHandler; -import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer; -import org.apache.flink.util.ExceptionUtils; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; - -import java.io.IOException; -import java.net.SocketTimeoutException; -import java.util.Optional; - -public class RetryRequestFailureHandler implements ActionRequestFailureHandler { - - @Override - public void onFailure(ActionRequest actionRequest, Throwable throwable, int i, RequestIndexer requestIndexer) throws Throwable { - if (ExceptionUtils.findThrowable(throwable, EsRejectedExecutionException.class).isPresent()) { - if (actionRequest instanceof IndexRequest) { - requestIndexer.add((IndexRequest) actionRequest); - } else if (actionRequest instanceof DeleteRequest) { - requestIndexer.add((DeleteRequest) actionRequest); - } else if (actionRequest instanceof UpdateRequest) { - requestIndexer.add((UpdateRequest) actionRequest); - } else { - throw new IllegalArgumentException("RequestIndexer only supports Index, Delete and Update requests"); - } - } else { - if (ExceptionUtils.findThrowable(throwable, SocketTimeoutException.class).isPresent()) { - return; - } else { - Optional<IOException> exp = ExceptionUtils.findThrowable(throwable, IOException.class); - if (exp.isPresent()) { - IOException ioExp = exp.get(); - if (ioExp.getMessage() != null && ioExp.getMessage().contains("max retry timeout")) { -// log.error(ioExp.getMessage()); - return; - } - } - } - throw throwable; - } - } - -} diff --git 
a/platform-base/src/main/java/com/zdjizhi/base/utils/SqlSubmit.java b/platform-base/src/main/java/com/zdjizhi/base/utils/SqlSubmit.java deleted file mode 100644 index 4bac88e..0000000 --- a/platform-base/src/main/java/com/zdjizhi/base/utils/SqlSubmit.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.zdjizhi.base.utils; - -import org.apache.flink.table.api.SqlParserException; -import org.apache.flink.table.api.StatementSet; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -import static com.zdjizhi.base.utils.FlinkEnvironmentUtils.tableEnvironment; - -public class SqlSubmit { - - private static Logger LOG = LoggerFactory.getLogger(SqlSubmit.class); - - private List<String> sqllist; - - private StatementSet stmtSet; - - private StreamTableEnvironment tEnv = tableEnvironment; - - public SqlSubmit(List<String> sqllist) { - this.sqllist=sqllist; - } - - public void run() throws Exception { - stmtSet = tEnv.createStatementSet(); - List<SqlCommandParserUtils.SqlCommandCall> scc = SqlCommandParserUtils.parse(sqllist); - - for(SqlCommandParserUtils.SqlCommandCall sqlCommandCall:scc){ - callCommand(sqlCommandCall); - } -// stmtSet.attachAsDataStream(); - } - - private void callCommand(SqlCommandParserUtils.SqlCommandCall cmdCall) { - - LOG.info("sql {}", cmdCall.operands[0]); - - switch (cmdCall.command) { - case SET: - callSet(cmdCall); - break; - case CREATE_TABLE: - callCreateTable(cmdCall); - break; - case CREATE_VIEW: - callCreateTable(cmdCall); - break; - case CREATE_TEMPORARY_VIEW: - callCreateTable(cmdCall); - break; - case INSERT_INTO: - callInsertInto(cmdCall); - break; - default: - LOG.error("Unsupported command {} .", "error",cmdCall.command); - throw new RuntimeException("Unsupported command: " + cmdCall.command); - } - } - - private void callSet(SqlCommandParserUtils.SqlCommandCall cmdCall) { - String key = cmdCall.operands[0]; - String value = cmdCall.operands[1]; - tEnv.getConfig().getConfiguration().setString(key, value); - } - - private void callCreateTable(SqlCommandParserUtils.SqlCommandCall cmdCall) { - String ddl = cmdCall.operands[0]; - try { - tEnv.executeSql(ddl); - } catch (SqlParserException e) { - LOG.error("SQL 
parse failed {} .", "error", e); - throw new RuntimeException("SQL parse failed:\n" + ddl + "\n", e); - } - } - - private void callInsertInto(SqlCommandParserUtils.SqlCommandCall cmdCall) { - String dml = cmdCall.operands[0]; - try { - stmtSet.addInsertSql(dml); - } catch (SqlParserException e) { - LOG.error("SQL parse failed {} .", "error", e); - throw new RuntimeException("SQL parse failed:\n" + dml + "\n", e); - } - } -} @@ -62,6 +62,18 @@ <dependencies> <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <version>23.0</version> + </dependency> + + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + <version>4.5.10</version> + </dependency> + + <dependency> <groupId>com.alibaba.fastjson2</groupId> <artifactId>fastjson2</artifactId> <version>2.0.32</version> @@ -108,23 +120,6 @@ </dependency> <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <version>4.12</version> - </dependency> - <dependency> - <groupId>org.apache.flink</groupId> - <artifactId>flink-connector-elasticsearch7_2.11</artifactId> - <version>${flink.version}</version> - <exclusions> - <exclusion> - <artifactId>log4j-api</artifactId> - <groupId>org.apache.logging.log4j</groupId> - </exclusion> - </exclusions> - </dependency> - - <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-clients_2.12</artifactId> <version>${flink.version}</version> @@ -137,48 +132,6 @@ </dependency> <dependency> - <groupId>org.apache.flink</groupId> - <artifactId>flink-table-planner-blink_2.12</artifactId> - <version>${flink.version}</version> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> - - <dependency> - <groupId>org.apache.flink</groupId> - <artifactId>flink-json</artifactId> - <version>${flink.version}</version> - </dependency> - - <dependency> - <groupId>org.apache.flink</groupId> - 
<artifactId>flink-table-api-java-bridge_2.12</artifactId> - <version>${flink.version}</version> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> - - <!--<dependency> - <groupId>com.zdjizhi</groupId> - <artifactId>galaxy</artifactId> - <version>${zdjizhi.version}</version> - <exclusions> - <exclusion> - <artifactId>httpclient</artifactId> - <groupId>org.apache.httpcomponents</groupId> - </exclusion> - </exclusions> - </dependency>--> - - <dependency> <groupId>com.geedgenetworks</groupId> <artifactId>galaxy</artifactId> <version>${zdjizhi.version}</version> @@ -224,6 +177,24 @@ <version>5.3.3</version> </dependency> + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-runtime_${scala.version}</artifactId> + <version>${flink.version}</version> + <exclusions> + <exclusion> + <artifactId>slf4j-api</artifactId> + <groupId>org.slf4j</groupId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> + <version>2.18.0</version> + </dependency> + <!--hadoop hdfs--> <dependency> <groupId>org.apache.hadoop</groupId> @@ -261,6 +232,7 @@ </exclusion> </exclusions> </dependency> + <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> @@ -279,6 +251,13 @@ </dependency> <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>4.12</version> + <scope>test</scope> + </dependency> + + <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-test-utils_${scala.version}</artifactId> <version>${flink.version}</version> @@ -296,19 +275,17 @@ <artifactId>log4j-slf4j-impl</artifactId> <groupId>org.apache.logging.log4j</groupId> </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>org.apache.flink</groupId> - <artifactId>flink-runtime_${scala.version}</artifactId> - 
<version>${flink.version}</version> - <exclusions> <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> + <artifactId>log4j-core</artifactId> + <groupId>org.apache.logging.log4j</groupId> + </exclusion> + <exclusion> + <artifactId>log4j-api</artifactId> + <groupId>org.apache.logging.log4j</groupId> </exclusion> </exclusions> </dependency> + <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-streaming-java_${scala.version}</artifactId> @@ -322,35 +299,6 @@ </exclusions> <classifier>tests</classifier> </dependency> - - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-api</artifactId> - <version>1.7.13</version> - </dependency> - - <dependency> - <groupId>org.apache.logging.log4j</groupId> - <artifactId>log4j-core</artifactId> - <version>2.18.0</version> - </dependency> - <dependency> - <groupId>org.apache.logging.log4j</groupId> - <artifactId>log4j-api</artifactId> - <version>2.18.0</version> - </dependency> - <dependency> - <groupId>org.apache.logging.log4j</groupId> - <artifactId>log4j-slf4j-impl</artifactId> - <version>2.18.0</version> - </dependency> - - <dependency> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - <version>23.0</version> - </dependency> - </dependencies> <dependencyManagement> |
