From 53d7c1778d15ff3dd9b4c411799455592990d03f Mon Sep 17 00:00:00 2001 From: doufenghu Date: Sat, 23 Nov 2024 19:23:16 +0800 Subject: [Improve][e2e] Rename all e2e test modules to adapt to changes in the API operators. --- groot-tests/pom.xml | 8 +- groot-tests/test-common/pom.xml | 45 --- .../test/common/AbstractFlinkContainer.java | 37 -- .../geedgenetworks/test/common/TestResource.java | 27 -- .../geedgenetworks/test/common/TestSuiteBase.java | 29 -- .../common/container/AbstractTestContainer.java | 192 ----------- .../container/AbstractTestFlinkContainer.java | 158 --------- .../common/container/ContainerExtendedFactory.java | 11 - .../test/common/container/ContainerUtil.java | 355 ------------------- .../test/common/container/EngineType.java | 16 - .../test/common/container/Flink13Container.java | 47 --- .../test/common/container/Flink17Container.java | 43 --- .../test/common/container/TestContainer.java | 36 -- .../test/common/container/TestContainerId.java | 20 -- .../common/container/TestContainersFactory.java | 7 - .../test/common/container/TestHelper.java | 23 -- .../test/common/junit/AnnotationUtil.java | 37 -- .../common/junit/ContainerTestingExtension.java | 84 ----- .../test/common/junit/DisabledOnContainer.java | 22 -- .../junit/TestCaseInvocationContextProvider.java | 114 ------- .../test/common/junit/TestContainerExtension.java | 12 - .../test/common/junit/TestContainers.java | 11 - .../test/common/junit/TestLoggerExtension.java | 60 ---- .../src/test/resources/grootstream.yaml | 14 - .../src/test/resources/log4j2.properties | 42 --- groot-tests/test-e2e-base/pom.xml | 49 --- .../test/e2e/base/EnvParameterIT.java | 198 ----------- .../test/e2e/base/Flink13Container.java | 34 -- .../test/e2e/base/InlineToPrintIT.java | 153 --------- .../geedgenetworks/test/e2e/base/TestUtils.java | 21 -- .../src/test/resources/inline_to_print.yaml | 232 ------------- .../src/test/resources/kafka_to_print.yaml | 40 --- .../test_env_parameter_inline_to_print.yaml | 47 --- groot-tests/test-e2e-clickhouse/pom.xml | 88 ----- .../test/e2e/clickhouse/ClickHouseIT.java | 353 ------------------- .../test/resources/clickhouse_data_type_sink.yaml | 79 ----- .../test/resources/init/clickhouse_test_sql.conf | 81 ----- .../src/test/resources/init/init-clickhouse.sql | 4 - .../src/test/resources/init/users.xml | 29 -- groot-tests/test-e2e-common/pom.xml | 45 +++ .../test/e2e/common/AbstractFlinkContainer.java | 37 ++ .../test/e2e/common/TestResource.java | 27 ++ .../test/e2e/common/TestSuiteBase.java | 29 ++ .../common/container/AbstractTestContainer.java | 192 +++++++++++ .../container/AbstractTestFlinkContainer.java | 157 +++++++++ .../common/container/ContainerExtendedFactory.java | 11 + .../test/e2e/common/container/ContainerUtil.java | 366 ++++++++++++++++++++ .../test/e2e/common/container/EngineType.java | 16 + .../e2e/common/container/Flink13Container.java | 47 +++ .../e2e/common/container/Flink17Container.java | 43 +++ .../test/e2e/common/container/TestContainer.java | 36 ++ .../test/e2e/common/container/TestContainerId.java | 20 ++ .../common/container/TestContainersFactory.java | 7 + .../test/e2e/common/container/TestHelper.java | 23 ++ .../test/e2e/common/junit/AnnotationUtil.java | 37 ++ .../common/junit/ContainerTestingExtension.java | 84 +++++ .../test/e2e/common/junit/DisabledOnContainer.java | 22 ++ .../junit/TestCaseInvocationContextProvider.java | 114 +++++++ .../e2e/common/junit/TestContainerExtension.java | 12 + .../test/e2e/common/junit/TestContainers.java | 11 + 
.../test/e2e/common/junit/TestLoggerExtension.java | 60 ++++ .../src/test/resources/grootstream.yaml | 14 + .../src/test/resources/log4j2.properties | 42 +++ groot-tests/test-e2e-connector-clickhouse/pom.xml | 88 +++++ .../e2e/connector/clickhouse/ClickHouseIT.java | 349 +++++++++++++++++++ .../test/resources/clickhouse_data_type_sink.yaml | 79 +++++ .../test/resources/init/clickhouse_test_sql.conf | 81 +++++ .../src/test/resources/init/init-clickhouse.sql | 4 + .../src/test/resources/init/users.xml | 29 ++ groot-tests/test-e2e-connector-kafka/pom.xml | 63 ++++ .../test/e2e/connector/kafka/KafkaIT.java | 374 +++++++++++++++++++++ .../resources/kafka_client_jass_cli.properties | 3 + .../src/test/resources/kafka_producer_quota.yaml | 120 +++++++ .../src/test/resources/kafka_server_jaas.conf | 8 + .../src/test/resources/kafka_sink.yaml | 68 ++++ .../kafka_sink_handle_error_json_format.yaml | 67 ++++ .../kafka_sink_skip_error_json_format.yaml | 67 ++++ .../src/test/resources/kafka_source.yaml | 41 +++ .../test/resources/kafka_source_error_schema.yaml | 42 +++ groot-tests/test-e2e-core/pom.xml | 49 +++ .../test/e2e/core/EnvParameterIT.java | 198 +++++++++++ .../test/e2e/core/Flink13Container.java | 34 ++ .../test/e2e/core/InlineToPrintIT.java | 150 +++++++++ .../geedgenetworks/test/e2e/core/ProcessorIT.java | 69 ++++ .../geedgenetworks/test/e2e/core/TestUtils.java | 21 ++ .../src/test/resources/inline_to_print.yaml | 232 +++++++++++++ .../test/resources/job_aggregate_processor.yaml | 76 +++++ .../src/test/resources/job_split_processor.yaml | 99 ++++++ .../test_env_parameter_inline_to_print.yaml | 47 +++ groot-tests/test-e2e-kafka/pom.xml | 63 ---- .../com/geedgenetworks/test/e2e/kafka/KafkaIT.java | 374 --------------------- .../resources/kafka_client_jass_cli.properties | 3 - .../src/test/resources/kafka_producer_quota.yaml | 120 ------- .../src/test/resources/kafka_server_jaas.conf | 8 - .../src/test/resources/kafka_sink.yaml | 68 ---- .../kafka_sink_handle_error_json_format.yaml | 67 ---- .../kafka_sink_skip_error_json_format.yaml | 67 ---- .../src/test/resources/kafka_source.yaml | 41 --- .../test/resources/kafka_source_error_schema.yaml | 42 --- 99 files changed, 3914 insertions(+), 3707 deletions(-) delete mode 100644 groot-tests/test-common/pom.xml delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/AbstractFlinkContainer.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestResource.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestSuiteBase.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestContainer.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestFlinkContainer.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerExtendedFactory.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerUtil.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/EngineType.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink13Container.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink17Container.java delete mode 100644 
groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainer.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainerId.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainersFactory.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestHelper.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/AnnotationUtil.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/ContainerTestingExtension.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/DisabledOnContainer.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestCaseInvocationContextProvider.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainerExtension.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainers.java delete mode 100644 groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestLoggerExtension.java delete mode 100644 groot-tests/test-common/src/test/resources/grootstream.yaml delete mode 100644 groot-tests/test-common/src/test/resources/log4j2.properties delete mode 100644 groot-tests/test-e2e-base/pom.xml delete mode 100644 groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/EnvParameterIT.java delete mode 100644 groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/Flink13Container.java delete mode 100644 groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/InlineToPrintIT.java delete mode 100644 groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/TestUtils.java delete mode 100644 groot-tests/test-e2e-base/src/test/resources/inline_to_print.yaml delete mode 100644 groot-tests/test-e2e-base/src/test/resources/kafka_to_print.yaml delete mode 100644 groot-tests/test-e2e-base/src/test/resources/test_env_parameter_inline_to_print.yaml delete mode 100644 groot-tests/test-e2e-clickhouse/pom.xml delete mode 100644 groot-tests/test-e2e-clickhouse/src/test/java/com/geedgenetworks/test/e2e/clickhouse/ClickHouseIT.java delete mode 100644 groot-tests/test-e2e-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml delete mode 100644 groot-tests/test-e2e-clickhouse/src/test/resources/init/clickhouse_test_sql.conf delete mode 100644 groot-tests/test-e2e-clickhouse/src/test/resources/init/init-clickhouse.sql delete mode 100644 groot-tests/test-e2e-clickhouse/src/test/resources/init/users.xml create mode 100644 groot-tests/test-e2e-common/pom.xml create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/AbstractFlinkContainer.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestResource.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestSuiteBase.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestContainer.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestFlinkContainer.java create mode 100644 
groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerExtendedFactory.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerUtil.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/EngineType.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink13Container.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink17Container.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainer.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainerId.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainersFactory.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestHelper.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/AnnotationUtil.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/ContainerTestingExtension.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/DisabledOnContainer.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestCaseInvocationContextProvider.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainerExtension.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainers.java create mode 100644 groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestLoggerExtension.java create mode 100644 groot-tests/test-e2e-common/src/test/resources/grootstream.yaml create mode 100644 groot-tests/test-e2e-common/src/test/resources/log4j2.properties create mode 100644 groot-tests/test-e2e-connector-clickhouse/pom.xml create mode 100644 groot-tests/test-e2e-connector-clickhouse/src/test/java/com/geedgenetworks/test/e2e/connector/clickhouse/ClickHouseIT.java create mode 100644 groot-tests/test-e2e-connector-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml create mode 100644 groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/clickhouse_test_sql.conf create mode 100644 groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/init-clickhouse.sql create mode 100644 groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/users.xml create mode 100644 groot-tests/test-e2e-connector-kafka/pom.xml create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/java/com/geedgenetworks/test/e2e/connector/kafka/KafkaIT.java create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_client_jass_cli.properties create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_producer_quota.yaml create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_server_jaas.conf create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink.yaml create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml create mode 100644 
groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source.yaml create mode 100644 groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source_error_schema.yaml create mode 100644 groot-tests/test-e2e-core/pom.xml create mode 100644 groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/EnvParameterIT.java create mode 100644 groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/Flink13Container.java create mode 100644 groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/InlineToPrintIT.java create mode 100644 groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/ProcessorIT.java create mode 100644 groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/TestUtils.java create mode 100644 groot-tests/test-e2e-core/src/test/resources/inline_to_print.yaml create mode 100644 groot-tests/test-e2e-core/src/test/resources/job_aggregate_processor.yaml create mode 100644 groot-tests/test-e2e-core/src/test/resources/job_split_processor.yaml create mode 100644 groot-tests/test-e2e-core/src/test/resources/test_env_parameter_inline_to_print.yaml delete mode 100644 groot-tests/test-e2e-kafka/pom.xml delete mode 100644 groot-tests/test-e2e-kafka/src/test/java/com/geedgenetworks/test/e2e/kafka/KafkaIT.java delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_client_jass_cli.properties delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_producer_quota.yaml delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_server_jaas.conf delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_sink.yaml delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_source.yaml delete mode 100644 groot-tests/test-e2e-kafka/src/test/resources/kafka_source_error_schema.yaml (limited to 'groot-tests') diff --git a/groot-tests/pom.xml b/groot-tests/pom.xml index 47b9177..51ebbbd 100644 --- a/groot-tests/pom.xml +++ b/groot-tests/pom.xml @@ -13,10 +13,10 @@ pom Groot : Tests : - test-common - test-e2e-base - test-e2e-kafka - test-e2e-clickhouse + test-e2e-common + test-e2e-core + test-e2e-connector-kafka + test-e2e-connector-clickhouse diff --git a/groot-tests/test-common/pom.xml b/groot-tests/test-common/pom.xml deleted file mode 100644 index c086f41..0000000 --- a/groot-tests/test-common/pom.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - 4.0.0 - - com.geedgenetworks - groot-tests - ${revision} - - - test-common - Groot : Tests : Common - - - - - - - - - - - - - - org.apache.maven.plugins - maven-jar-plugin - ${maven-jar-plugin.version} - - false - - - - - test-jar - - - - - - - - - \ No newline at end of file diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/AbstractFlinkContainer.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/AbstractFlinkContainer.java deleted file mode 100644 index 3444e0d..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/AbstractFlinkContainer.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.geedgenetworks.test.common; - - -import com.geedgenetworks.test.common.container.AbstractTestFlinkContainer; -import 
lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.TestInstance; -import org.testcontainers.containers.Container; - -import java.io.IOException; - -@Slf4j -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -public abstract class AbstractFlinkContainer extends AbstractTestFlinkContainer { - @Override - @BeforeAll - public void startUp() throws Exception { - super.startUp(); - log.info("The TestContainer[{}] is running.", identifier()); - } - - @Override - @AfterAll - public void tearDown() throws Exception { - super.tearDown(); - log.info("The TestContainer[{}] is closed.", identifier()); - } - - - public Container.ExecResult executeGrootStreamFlinkJob(String confFile) - throws IOException, InterruptedException { - return executeJob(confFile); - } - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestResource.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestResource.java deleted file mode 100644 index f84307e..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestResource.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.geedgenetworks.test.common; - -/** - * Basic abstractions for all resources used in connector testing framework. - * - *
Lifecycle of test resources will be managed by the framework. - */ -public interface TestResource { - - /** - * Start up the test resource. - * - *
The implementation of this method should be idempotent. - * - * @throws Exception if anything wrong when starting the resource - */ - void startUp() throws Exception; - - /** - * Tear down the test resource. - * - *
The test resource should be able to tear down even without a startup (could be a no-op). - * - * @throws Exception if anything wrong when tearing the resource down - */ - void tearDown() throws Exception; -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestSuiteBase.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestSuiteBase.java deleted file mode 100644 index 83f4a7c..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/TestSuiteBase.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.geedgenetworks.test.common; - -import com.geedgenetworks.test.common.container.ContainerUtil; -import com.geedgenetworks.test.common.container.TestContainer; -import com.geedgenetworks.test.common.container.TestContainersFactory; -import com.geedgenetworks.test.common.junit.ContainerTestingExtension; -import com.geedgenetworks.test.common.junit.TestCaseInvocationContextProvider; -import com.geedgenetworks.test.common.junit.TestContainers; -import com.geedgenetworks.test.common.junit.TestLoggerExtension; -import com.github.dockerjava.api.DockerClient; -import org.junit.jupiter.api.TestInstance; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.DockerClientFactory; -import org.testcontainers.containers.Network; - -@ExtendWith({ - ContainerTestingExtension.class, - TestLoggerExtension.class, - TestCaseInvocationContextProvider.class -}) -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -public abstract class TestSuiteBase { - protected static final Network NETWORK = TestContainer.NETWORK; - @TestContainers - private TestContainersFactory containersFactory = ContainerUtil::discoverTestContainers; - protected DockerClient dockerClient = DockerClientFactory.lazyClient(); - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestContainer.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestContainer.java deleted file mode 100644 index 14eb5fb..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestContainer.java +++ /dev/null @@ -1,192 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.GenericContainer; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; -import static com.geedgenetworks.test.common.container.ContainerUtil.PROJECT_ROOT_PATH; -public abstract class AbstractTestContainer implements TestContainer { - protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestContainer.class); - - public static final String GROOTSTREAM_HOME = "/tmp/grootstream/"; - - protected final String startModuleName; - - protected final String startModuleFullPath; - - public AbstractTestContainer() { - this.startModuleName = getStartModuleName(); - this.startModuleFullPath = - PROJECT_ROOT_PATH - + File.separator - + this.startModuleName; - ContainerUtil.checkPathExist(startModuleFullPath); - } - - protected abstract String getDockerImage(); - - protected abstract String getStartModuleName(); - - protected abstract String getStartShellName(); - - protected abstract String getConnectorModulePath(); - - protected abstract String getConnectorType(); - - protected abstract String getSavePointCommand(); - - protected 
abstract String getRestoreCommand(); - - protected abstract String getConnectorNamePrefix(); - - protected abstract List getExtraStartShellCommands(); - - protected void executeExtraCommands(GenericContainer container) - throws IOException, InterruptedException { - // do nothing - } - - protected void copyGrootStreamStarterToContainer(GenericContainer container) { - ContainerUtil.copyGrootStreamStarterToContainer( - container, this.startModuleName, this.startModuleFullPath, GROOTSTREAM_HOME); - } - - protected void copyGrootStreamStarterLoggingToContainer(GenericContainer container) { - ContainerUtil.copyGrootStreamStarterLoggingToContainer( - container, this.startModuleFullPath, GROOTSTREAM_HOME); - } - - protected Container.ExecResult executeJob(GenericContainer container, String confFile, List variables) - throws IOException, InterruptedException { - - final String confInContainerPath = ContainerUtil.copyConfigFileToContainer(container, confFile); - // copy connectors - ContainerUtil.copyConnectorJarToContainer( - container, - confFile, - getConnectorModulePath(), - getConnectorNamePrefix(), - getConnectorType(), - GROOTSTREAM_HOME); - final List command = new ArrayList<>(); - String binPath = Paths.get(GROOTSTREAM_HOME, "bin", getStartShellName()).toString(); - // base command - command.add(ContainerUtil.adaptPathForWin(binPath)); - command.add("--config"); - command.add(ContainerUtil.adaptPathForWin(confInContainerPath)); - command.add("--target"); - command.add("remote"); - List extraStartShellCommands = new ArrayList<>(getExtraStartShellCommands()); - if (variables != null && !variables.isEmpty()) { - variables.forEach( - v -> { - extraStartShellCommands.add("-i"); - extraStartShellCommands.add(v); - }); - } - command.addAll(extraStartShellCommands); - return executeCommand(container, command); - } - - - - protected Container.ExecResult executeJob(GenericContainer container, String confFile) - throws IOException, InterruptedException { - return executeJob(container, confFile, null); - } - - - - protected Container.ExecResult savepointJob(GenericContainer container, String jobId) - throws IOException, InterruptedException { - final List command = new ArrayList<>(); - String binPath = Paths.get(GROOTSTREAM_HOME, "bin", getStartShellName()).toString(); - // base command - command.add(ContainerUtil.adaptPathForWin(binPath)); - command.add(getSavePointCommand()); - command.add(jobId); - command.addAll(getExtraStartShellCommands()); - return executeCommand(container, command); - } - - protected Container.ExecResult restoreJob( - GenericContainer container, String confFile, String jobId) - throws IOException, InterruptedException { - final String confInContainerPath = ContainerUtil.copyConfigFileToContainer(container, confFile); - // copy connectors - ContainerUtil.copyConnectorJarToContainer( - container, - confFile, - getConnectorModulePath(), - getConnectorNamePrefix(), - getConnectorType(), - GROOTSTREAM_HOME); - final List command = new ArrayList<>(); - String binPath = Paths.get(GROOTSTREAM_HOME, "bin", getStartShellName()).toString(); - // base command - command.add(ContainerUtil.adaptPathForWin(binPath)); - command.add("--config"); - command.add(ContainerUtil.adaptPathForWin(confInContainerPath)); - command.add(getRestoreCommand()); - command.add(jobId); - command.addAll(getExtraStartShellCommands()); - return executeCommand(container, command); - } - - protected Container.ExecResult executeCommand( - GenericContainer container, List command) - throws IOException, 
InterruptedException { - String commandStr = String.join(" ", command); - LOG.info( - "Execute command in container[{}] " - + "\n==================== Shell Command start ====================\n" - + "{}" - + "\n==================== Shell Command end ====================", - container.getDockerImageName(), - commandStr); - Container.ExecResult execResult = container.execInContainer("bash", "-c", commandStr); - - if (execResult.getStdout() != null && !execResult.getStdout().isEmpty()) { - LOG.info( - "Container[{}] command {} STDOUT:" - + "\n==================== STDOUT start ====================\n" - + "{}" - + "\n==================== STDOUT end ====================", - container.getDockerImageName(), - commandStr, - execResult.getStdout()); - } - if (execResult.getStderr() != null && !execResult.getStderr().isEmpty()) { - LOG.error( - "Container[{}] command {} STDERR:" - + "\n==================== STDERR start ====================\n" - + "{}" - + "\n==================== STDERR end ====================", - container.getDockerImageName(), - commandStr, - execResult.getStderr()); - } - - if (execResult.getExitCode() != 0) { - LOG.info( - "Container[{}] command {} Server Log:" - + "\n==================== Server Log start ====================\n" - + "{}" - + "\n==================== Server Log end ====================", - container.getDockerImageName(), - commandStr, - container.getLogs()); - } - - return execResult; - } - - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestFlinkContainer.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestFlinkContainer.java deleted file mode 100644 index 4ac3d03..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/AbstractTestFlinkContainer.java +++ /dev/null @@ -1,158 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import com.google.common.collect.Lists; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; -import org.testcontainers.images.PullPolicy; -import org.testcontainers.lifecycle.Startables; -import org.testcontainers.utility.DockerLoggerFactory; - -import java.io.IOException; -import java.time.Duration; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.stream.Stream; - -@NoArgsConstructor -@Slf4j -public abstract class AbstractTestFlinkContainer extends AbstractTestContainer { - protected static final List DEFAULT_FLINK_PROPERTIES = - Arrays.asList( - "jobmanager.rpc.address: jobmanager", - "taskmanager.numberOfTaskSlots: 10", - "parallelism.default: 4", - "env.java.opts: -Doracle.jdbc.timezoneAsRegion=false"); - - protected static final String DEFAULT_DOCKER_IMAGE = "flink:1.13.1-scala_2.11-java11"; - - protected GenericContainer jobManager; - protected GenericContainer taskManager; - - @Override - protected String getDockerImage() { - return DEFAULT_DOCKER_IMAGE; - } - - @Override - public void startUp() throws Exception { - final String dockerImage = getDockerImage(); - final String properties = String.join("\n", getFlinkProperties()); - jobManager = - new GenericContainer<>(dockerImage) - .withCommand("jobmanager") - .withNetwork(NETWORK) - .withNetworkAliases("jobmanager") - 
.withExposedPorts() - .withEnv("FLINK_PROPERTIES", properties) - .withLogConsumer( - new Slf4jLogConsumer( - DockerLoggerFactory.getLogger(dockerImage + ":jobmanager"))) - .waitingFor( - new LogMessageWaitStrategy() - .withRegEx(".*Starting the resource manager.*") - .withStartupTimeout(Duration.ofMinutes(2))) - ; - - // Copy groot-stream bootstrap and some other files to the container - copyGrootStreamStarterToContainer(jobManager); - copyGrootStreamStarterLoggingToContainer(jobManager); - - jobManager.setPortBindings(Lists.newArrayList(String.format("%s:%s", 8084, 8081))); - - taskManager = - new GenericContainer<>(dockerImage) - .withCommand("taskmanager") - .withNetwork(NETWORK) - .withNetworkAliases("taskmanager") - .withEnv("FLINK_PROPERTIES", properties) - .dependsOn(jobManager) - .withLogConsumer( - new Slf4jLogConsumer( - DockerLoggerFactory.getLogger( - dockerImage + ":taskmanager"))) - .waitingFor( - new LogMessageWaitStrategy() - .withRegEx( - ".*Successful registration at resource manager.*") - .withStartupTimeout(Duration.ofMinutes(2))); - - // Copy groot-stream bootstrap and some other files to the container - copyGrootStreamStarterToContainer(taskManager); - copyGrootStreamStarterLoggingToContainer(taskManager); - - Startables.deepStart(Stream.of(jobManager)).join(); - Startables.deepStart(Stream.of(taskManager)).join(); - // execute extra commands - executeExtraCommands(jobManager); - } - - protected List getFlinkProperties() { - return DEFAULT_FLINK_PROPERTIES; - } - - @Override - public void tearDown() throws Exception { - if (taskManager != null) { - taskManager.stop(); - } - if (jobManager != null) { - jobManager.stop(); - } - } - - @Override - protected String getSavePointCommand() { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - protected String getRestoreCommand() { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - protected List getExtraStartShellCommands() { - return Collections.emptyList(); - } - - - public void executeExtraCommands(ContainerExtendedFactory extendedFactory) - throws IOException, InterruptedException { - extendedFactory.extend(jobManager); - extendedFactory.extend(taskManager); - } - - - @Override - public Container.ExecResult executeJob(String confFile) - throws IOException, InterruptedException { - return executeJob(confFile, null); - } - - @Override - public Container.ExecResult executeJob(String confFile, List variables) - throws IOException, InterruptedException { - log.info("test in container: {}", identifier()); - return executeJob(jobManager, confFile, variables); - } - - @Override - public String getServerLogs() { - return jobManager.getLogs() + "\n" + taskManager.getLogs(); - } - - - public String executeJobManagerInnerCommand(String command) - throws IOException, InterruptedException { - return jobManager.execInContainer("bash", "-c", command).getStdout(); - } - - - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerExtendedFactory.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerExtendedFactory.java deleted file mode 100644 index c7c1df0..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerExtendedFactory.java +++ /dev/null @@ -1,11 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import org.testcontainers.containers.GenericContainer; - -import java.io.IOException; - -@FunctionalInterface -public interface 
ContainerExtendedFactory { - void extend(GenericContainer engineMasterContainer) throws IOException, InterruptedException; - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerUtil.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerUtil.java deleted file mode 100644 index 811bdf6..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/ContainerUtil.java +++ /dev/null @@ -1,355 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import cn.hutool.core.util.XmlUtil; -import com.geedgenetworks.bootstrap.utils.ConfigBuilder; -import com.google.common.collect.Lists; -import com.typesafe.config.Config; -import com.typesafe.config.ConfigFactory; -import com.typesafe.config.ConfigResolveOptions; -import groovy.lang.Tuple2; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; -import org.junit.jupiter.api.Assertions; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.utility.MountableFile; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.*; -import java.util.stream.Collectors; - -@Slf4j -public final class ContainerUtil { - - public static final String PROJECT_ROOT_PATH = getProjectRootPath(); - public static final String PLUGIN_MAPPING_FILE = "plugin-mapping.properties"; - - - private static String getProjectRootPath() { - String testCommonRootModuleDir = "groot-tests"; - Path path = Paths.get(System.getProperty("user.dir")); - while (!path.endsWith(Paths.get(testCommonRootModuleDir))) { - path = path.getParent(); - } - return path.getParent().toString(); - } - - public static String getProjectVersion() { - String pomFile = PROJECT_ROOT_PATH + File.separator + "pom.xml"; - checkPathExist(pomFile); - Document docResult = XmlUtil.readXML(new File(pomFile)); - Element project = XmlUtil.getRootElement(docResult); - Element properties = XmlUtil.getElement(project, "properties"); - Element revisionElement = XmlUtil.getElement(properties, "revision"); - return revisionElement.getTextContent(); - } - - - public static void checkPathExist(String path) { - Assertions.assertTrue(new File(path).exists(), path + " must exist"); - } - - public static void copyGrootStreamStarterToContainer( - GenericContainer container, - String startModuleName, - String startModulePath, - String GrootStreamHomeInContainer) { - - final String[] splits = StringUtils.split(startModuleName, File.separator); - final String startJarName = splits[splits.length - 1] + ".jar"; - final String startJarPath = - startModulePath + File.separator + "target" + File.separator + startJarName; - checkPathExist(startJarPath); - - // don't use container#withFileSystemBind, this isn't supported in Windows. 
- container.withCopyFileToContainer( - MountableFile.forHostPath(startJarPath), - Paths.get(GrootStreamHomeInContainer, "bootstrap", startJarName).toString()); - - - // copy libs - - String formatJsonJar = "format-json-" + getProjectVersion() + ".jar"; - Path formatJsonJarPath = - Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-json", "target", formatJsonJar); - container.withCopyFileToContainer( - MountableFile.forHostPath(formatJsonJarPath), - Paths.get(GrootStreamHomeInContainer, "lib", formatJsonJar).toString()); - - String formatProtobufJar = "format-protobuf-" + getProjectVersion() + ".jar"; - Path formatProtobufJarPath = - Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-protobuf", "target", formatProtobufJar); - container.withCopyFileToContainer( - MountableFile.forHostPath(formatProtobufJarPath), - Paths.get(GrootStreamHomeInContainer, "lib", formatProtobufJar).toString()); - - String formatMsgpackJar = "format-msgpack-" + getProjectVersion() + ".jar"; - Path formatMsgpackJarPath = - Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-msgpack", "target", formatMsgpackJar); - container.withCopyFileToContainer( MountableFile.forHostPath(formatMsgpackJarPath), - Paths.get(GrootStreamHomeInContainer, "lib", formatMsgpackJar).toString()); - - String formatRawJar = "format-raw-" + getProjectVersion() + ".jar"; - Path formatRawJarPath = - Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-raw", "target", formatRawJar); - container.withCopyFileToContainer( MountableFile.forHostPath(formatRawJarPath), - Paths.get(GrootStreamHomeInContainer, "lib", formatRawJar).toString()); - - - //copy system config - final String configPath = PROJECT_ROOT_PATH + "/config"; - checkPathExist(configPath); - container.withCopyFileToContainer(MountableFile.forHostPath(configPath), - Paths.get(GrootStreamHomeInContainer, "config").toString()); - - // copy grootstream.yaml - final String grootTestsCommonPath = PROJECT_ROOT_PATH + "/groot-tests/test-common/src/test/resources"; - checkPathExist(grootTestsCommonPath); - container.withCopyFileToContainer( - MountableFile.forHostPath(grootTestsCommonPath + "/grootstream.yaml"), - Paths.get(GrootStreamHomeInContainer, "config", "grootstream.yaml").toString()); - - - // copy bin - final String startBinPath = startModulePath + File.separator + "src/main/bin/"; - checkPathExist(startBinPath); - container.withCopyFileToContainer( - MountableFile.forHostPath(startBinPath), - Paths.get(GrootStreamHomeInContainer, "bin").toString()); - - // copy plugin-mapping.properties - container.withCopyFileToContainer( - MountableFile.forHostPath(PROJECT_ROOT_PATH + "/plugin-mapping.properties"), - Paths.get(GrootStreamHomeInContainer, "connectors", PLUGIN_MAPPING_FILE).toString()); - - - - } - - public static void copyGrootStreamStarterLoggingToContainer( - GenericContainer container, - String startModulePath, - String GrootStreamHomeInContainer) { - // copy logging lib - final String loggingLibPath = - startModulePath - + File.separator - + "target" - + File.separator - + "logging-e2e" - + File.separator; - checkPathExist(loggingLibPath); - container.withCopyFileToContainer( - MountableFile.forHostPath(loggingLibPath), - Paths.get(GrootStreamHomeInContainer, "bootstrap", "logging").toString()); - } - - public static String copyConfigFileToContainer(GenericContainer container, String confFile) { - final String targetConfInContainer = Paths.get("/tmp", confFile).toString(); - container.copyFileToContainer( - MountableFile.forHostPath(getResourcesFile(confFile).getAbsolutePath()), - 
targetConfInContainer); - return targetConfInContainer; - } - - public static File getResourcesFile(String confFile) { - File file = new File(getCurrentModulePath() + "/src/test/resources" + confFile); - if (file.exists()) { - return file; - } - throw new IllegalArgumentException(confFile + " doesn't exist"); - } - - public static Path getCurrentModulePath() { - return Paths.get(System.getProperty("user.dir")); - } - - public static void copyConnectorJarToContainer( - GenericContainer container, - String confFile, - String connectorsRootPath, - String connectorPrefix, - String connectorType, - String grootStreamHome) { - Config jobConfig = getJobConfig(getResourcesFile(confFile)); - Config connectorsMapping = - getPluginProperties(new File(PROJECT_ROOT_PATH + File.separator + PLUGIN_MAPPING_FILE)); - if (!connectorsMapping.hasPath(connectorType) - || connectorsMapping.getConfig(connectorType).isEmpty()) { - return; - } - Config connectors = connectorsMapping.getConfig(connectorType); - Set connectorNames = getConnectors(jobConfig, connectors, "source"); - connectorNames.addAll(getConnectors(jobConfig, connectors, "sink")); - File module = new File(PROJECT_ROOT_PATH + File.separator + connectorsRootPath); - - List connectorFiles = getConnectorFiles(module, connectorNames, connectorPrefix); - connectorFiles.forEach( - jar -> - container.copyFileToContainer( - MountableFile.forHostPath(jar.getAbsolutePath()), - Paths.get(grootStreamHome, "connectors", jar.getName()).toString())); - } - - public static String adaptPathForWin(String path) { - // Running IT use cases under Windows requires replacing \ with / - return path == null ? "" : path.replaceAll("\\\\", "/"); - } - - private static List getConnectorFiles( - File currentModule, Set connectorNames, String connectorPrefix) { - List connectorFiles = new ArrayList<>(); - for (File file : Objects.requireNonNull(currentModule.listFiles())) { - getConnectorFiles(file, connectorNames, connectorPrefix, connectorFiles); - } - return connectorFiles; - } - - private static void getConnectorFiles( - File currentModule, - Set connectorNames, - String connectorPrefix, - List connectors) { - if (currentModule.isFile() || connectorNames.size() == connectors.size()) { - return; - } - if (connectorNames.contains(currentModule.getName())) { - File targetPath = new File(currentModule.getAbsolutePath() + File.separator + "target"); - for (File file : Objects.requireNonNull(targetPath.listFiles())) { - if (file.getName().startsWith(currentModule.getName()) - && !file.getName().endsWith("javadoc.jar") - && !file.getName().endsWith("tests.jar")) { - connectors.add(file); - return; - } - } - } - - if (currentModule.getName().startsWith(connectorPrefix)) { - for (File file : Objects.requireNonNull(currentModule.listFiles())) { - getConnectorFiles(file, connectorNames, connectorPrefix, connectors); - } - } - } - - public static List discoverTestContainers() { - try { - final List result = new LinkedList<>(); - ServiceLoader.load(TestContainer.class, Thread.currentThread().getContextClassLoader()) - .iterator() - .forEachRemaining(result::add); - return result; - } catch (ServiceConfigurationError e) { - log.error("Could not load service provider for containers.", e); - throw new RuntimeException("Could not load service provider for containers.", e); - } - } - - private static Set getConnectors( - Config jobConfig, Config connectorsMap, String pluginType) { - // using specific needed plugin type in the job config - Config connectorConfig = 
jobConfig.getConfig(pluginType+"s"); - List connectorList = Lists.newArrayList(); - connectorConfig.root().unwrapped().forEach((key,value) -> { - Map map = (Map) value; - connectorList.add(map.get("type").toString()); - - }); - - Map connectors = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); - connectorsMap.getConfig(pluginType).entrySet().forEach(entry -> { - connectors.put(entry.getKey(), entry.getValue().unwrapped().toString()); - }); - - return connectorList.stream() - .map(String::toLowerCase) - .filter(connectors::containsKey) - .map(connectors::get) - .collect(Collectors.toSet()); - } - - private static Config getJobConfig(File file) { - return ConfigBuilder.of(file.getAbsolutePath()); - } - - private static Config getPluginProperties(File file) { - return ConfigFactory.parseFile(file) - .resolve(ConfigResolveOptions.defaults().setAllowUnresolved(true)) - .resolveWith( - ConfigFactory.systemProperties(), - ConfigResolveOptions.defaults().setAllowUnresolved(true)); - } - - public static List getJVMThreadNames(GenericContainer container) - throws IOException, InterruptedException { - return getJVMThreads(container).stream().map(Tuple2::getV1).collect(Collectors.toList()); - } - - public static Map getJVMLiveObject(GenericContainer container) - throws IOException, InterruptedException { - Container.ExecResult liveObjects = - container.execInContainer("jmap", "-histo:live", getJVMProcessId(container)); - Assertions.assertEquals(0, liveObjects.getExitCode()); - String value = liveObjects.getStdout().trim(); - return Arrays.stream(value.split("\n")) - .skip(2) - .map( - str -> - Arrays.stream(str.split(" ")) - .filter(StringUtils::isNotEmpty) - .collect(Collectors.toList())) - .filter(list -> list.size() == 4) - .collect( - Collectors.toMap( - list -> list.get(3), - list -> Integer.valueOf(list.get(1)), - (a, b) -> a)); - } - - public static List> getJVMThreads(GenericContainer container) - throws IOException, InterruptedException { - Container.ExecResult threads = - container.execInContainer("jstack", getJVMProcessId(container)); - Assertions.assertEquals(0, threads.getExitCode()); - // Thread name line example - // "hz.main.MetricsRegistry.thread-2" #232 prio=5 os_prio=0 tid=0x0000ffff3c003000 nid=0x5e - // waiting on condition [0x0000ffff6cf3a000] - return Arrays.stream(threads.getStdout().trim().split("\n\n")) - .filter(s -> s.startsWith("\"")) - .map( - threadStr -> - new Tuple2<>( - Arrays.stream(threadStr.split("\n")) - .filter(s -> s.startsWith("\"")) - .map(s -> s.substring(1, s.lastIndexOf("\""))) - .findFirst() - .get(), - threadStr)) - .collect(Collectors.toList()); - } - - private static String getJVMProcessId(GenericContainer container) - throws IOException, InterruptedException { - Container.ExecResult processes = container.execInContainer("jps"); - Assertions.assertEquals(0, processes.getExitCode()); - Optional server = - Arrays.stream(processes.getStdout().trim().split("\n")) - .filter(s -> s.contains("GrootstreamServer")) - .findFirst(); - Assertions.assertTrue(server.isPresent()); - return server.get().trim().split(" ")[0]; - } - - - - - - - - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/EngineType.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/EngineType.java deleted file mode 100644 index 4f348ae..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/EngineType.java +++ /dev/null @@ -1,16 +0,0 @@ -package 
com.geedgenetworks.test.common.container; - -import lombok.AllArgsConstructor; -import lombok.Getter; - -@Getter -@AllArgsConstructor -public enum EngineType { - FLINK("Flink"); - private final String name; - @Override - public String toString() { - return name; - } - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink13Container.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink13Container.java deleted file mode 100644 index 338c696..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink13Container.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import com.google.auto.service.AutoService; -import lombok.NoArgsConstructor; - -@NoArgsConstructor -@AutoService(TestContainer.class) -public class Flink13Container extends AbstractTestFlinkContainer { - - @Override - protected String getStartModuleName() { - return "groot-bootstrap"; - - } - - @Override - protected String getStartShellName() { - return "start.sh"; - } - - @Override - protected String getConnectorModulePath() { - return "groot-connectors"; - } - - @Override - protected String getConnectorType() { - return "grootstream"; - } - - @Override - protected String getConnectorNamePrefix() { - return "connector-"; - } - - @Override - public TestContainerId identifier() { - return TestContainerId.FLINK_1_13; - } - - @Override - protected String getDockerImage() { - return "192.168.40.153:8082/common/flink:1.13.1-scala_2.11-java11"; - } - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink17Container.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink17Container.java deleted file mode 100644 index 371b542..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/Flink17Container.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import com.google.auto.service.AutoService; -import lombok.NoArgsConstructor; - -@NoArgsConstructor -@AutoService(TestContainer.class) -public class Flink17Container extends AbstractTestFlinkContainer { - - @Override - protected String getStartModuleName() { - return "groot-bootstrap"; - } - - @Override - protected String getStartShellName() { - return "start.sh"; - } - - @Override - protected String getConnectorModulePath() { - return "groot-connectors"; - } - - @Override - protected String getConnectorType() { - return "grootstream"; - } - - @Override - protected String getConnectorNamePrefix() { - return "connector-"; - } - - @Override - public TestContainerId identifier() { - return TestContainerId.FLINK_1_17 ; - } - @Override - protected String getDockerImage() { - return "flink:1.17.2-scala_2.12-java11"; - } -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainer.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainer.java deleted file mode 100644 index b3bf77a..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainer.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import com.geedgenetworks.test.common.TestResource; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.Network; - -import java.io.IOException; -import java.util.List; - -public interface 
TestContainer extends TestResource { - Network NETWORK = Network.newNetwork(); - TestContainerId identifier(); - - void executeExtraCommands(ContainerExtendedFactory extendedFactory) - throws IOException, InterruptedException; - - Container.ExecResult executeJob(String confFile) throws IOException, InterruptedException; - - Container.ExecResult executeJob(String confFile, List variables) - throws IOException, InterruptedException; - - default Container.ExecResult savepointJob(String jobId) - throws IOException, InterruptedException { - throw new UnsupportedOperationException("Not implemented"); - } - - default Container.ExecResult restoreJob(String confFile, String jobId) - throws IOException, InterruptedException { - throw new UnsupportedOperationException("Not implemented"); - } - - String getServerLogs(); - - - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainerId.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainerId.java deleted file mode 100644 index e837d25..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainerId.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import lombok.AllArgsConstructor; -import lombok.Getter; - -import static com.geedgenetworks.test.common.container.EngineType.FLINK; -@Getter -@AllArgsConstructor -public enum TestContainerId { - FLINK_1_13(FLINK, "1.13.1"), - FLINK_1_17(FLINK, "1.17.2"); - private final EngineType engineType; - private final String version; - - @Override - public String toString() { - return engineType.toString() + ":" + version; - } - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainersFactory.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainersFactory.java deleted file mode 100644 index 675ee53..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestContainersFactory.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import java.util.List; - -public interface TestContainersFactory { - List create(); -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestHelper.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestHelper.java deleted file mode 100644 index e343dcd..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/container/TestHelper.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.geedgenetworks.test.common.container; - -import org.junit.jupiter.api.Assertions; -import org.testcontainers.containers.Container; - -import java.io.IOException; - -public class TestHelper { - private final TestContainer container; - - public TestHelper(TestContainer container) { - this.container = container; - } - - public void execute(String file) throws IOException, InterruptedException { - execute(0, file); - } - - public void execute(int exceptResult, String file) throws IOException, InterruptedException { - Container.ExecResult result = container.executeJob(file); - Assertions.assertEquals(exceptResult, result.getExitCode(), result.getStderr()); - } -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/AnnotationUtil.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/AnnotationUtil.java deleted file 
mode 100644 index c10e782..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/AnnotationUtil.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import com.geedgenetworks.test.common.container.TestContainer; -import com.geedgenetworks.test.common.container.TestContainerId; -import com.geedgenetworks.test.common.container.EngineType; -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import org.junit.platform.commons.util.AnnotationUtils; -import java.lang.reflect.AnnotatedElement; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - - -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public class AnnotationUtil { - public static List filterDisabledContainers( - List containers, AnnotatedElement annotatedElement) { - // Filters disabled containers - final List disabledContainers = new ArrayList<>(); - final List disabledEngineTypes = new ArrayList<>(); - AnnotationUtils.findAnnotation(annotatedElement, DisabledOnContainer.class) - .ifPresent( - annotation -> { - Collections.addAll(disabledContainers, annotation.value()); - Collections.addAll(disabledEngineTypes, annotation.type()); - }); - return containers.stream() - .filter(container -> !disabledContainers.contains(container.identifier())) - .filter( - container -> - !disabledEngineTypes.contains( - container.identifier().getEngineType())) - .collect(Collectors.toList()); - } -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/ContainerTestingExtension.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/ContainerTestingExtension.java deleted file mode 100644 index bad2ce4..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/ContainerTestingExtension.java +++ /dev/null @@ -1,84 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import com.geedgenetworks.test.common.container.ContainerExtendedFactory; -import com.geedgenetworks.test.common.container.TestContainer; -import com.geedgenetworks.test.common.container.TestContainersFactory; -import org.junit.jupiter.api.extension.AfterAllCallback; -import org.junit.jupiter.api.extension.BeforeAllCallback; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.platform.commons.support.AnnotationSupport; - -import java.lang.annotation.Annotation; -import java.util.Collection; -import java.util.List; - -public class ContainerTestingExtension implements BeforeAllCallback, AfterAllCallback { - public static final ExtensionContext.Namespace TEST_RESOURCE_NAMESPACE = - ExtensionContext.Namespace.create("testResourceNamespace"); - public static final String TEST_CONTAINERS_STORE_KEY = "testContainers"; - public static final String TEST_EXTENDED_FACTORY_STORE_KEY = "testContainerExtendedFactory"; - - @Override - public void beforeAll(ExtensionContext context) throws Exception { - - List containerExtendedFactories = - AnnotationSupport.findAnnotatedFieldValues( - context.getRequiredTestInstance(), - TestContainerExtension.class, - ContainerExtendedFactory.class); - checkAtMostOneAnnotationField(containerExtendedFactories, TestContainerExtension.class); - ContainerExtendedFactory containerExtendedFactory = container -> {}; - if (!containerExtendedFactories.isEmpty()) { - containerExtendedFactory = containerExtendedFactories.get(0); - } - context.getStore(TEST_RESOURCE_NAMESPACE) - .put(TEST_EXTENDED_FACTORY_STORE_KEY, 
containerExtendedFactory); - - List containersFactories = - AnnotationSupport.findAnnotatedFieldValues( - context.getRequiredTestInstance(), - TestContainers.class, - TestContainersFactory.class); - - checkExactlyOneAnnotatedField(containersFactories, TestContainers.class); - - List testContainers = - AnnotationUtil.filterDisabledContainers( - containersFactories.get(0).create(), - context.getRequiredTestInstance().getClass()); - context.getStore(TEST_RESOURCE_NAMESPACE).put(TEST_CONTAINERS_STORE_KEY, testContainers); - - } - - @Override - public void afterAll(ExtensionContext context) throws Exception { - context.getStore(TEST_RESOURCE_NAMESPACE).remove(TEST_CONTAINERS_STORE_KEY); - } - - - - - private void checkExactlyOneAnnotatedField( - Collection fields, Class annotation) { - checkAtMostOneAnnotationField(fields, annotation); - checkAtLeastOneAnnotationField(fields, annotation); - } - - private void checkAtLeastOneAnnotationField( - Collection fields, Class annotation) { - if (fields.isEmpty()) { - throw new IllegalStateException( - String.format( - "No fields are annotated with '@%s'", annotation.getSimpleName())); - } - } - private void checkAtMostOneAnnotationField( - Collection fields, Class annotation) { - if (fields.size() > 1) { - throw new IllegalStateException( - String.format( - "Multiple fields are annotated with '@%s'", - annotation.getSimpleName())); - } - } -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/DisabledOnContainer.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/DisabledOnContainer.java deleted file mode 100644 index 3c4e655..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/DisabledOnContainer.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import com.geedgenetworks.test.common.container.EngineType; -import com.geedgenetworks.test.common.container.TestContainerId; - -import java.lang.annotation.*; - -@Target({ElementType.TYPE, ElementType.METHOD}) -@Retention(RetentionPolicy.RUNTIME) -@Inherited -public @interface DisabledOnContainer { - TestContainerId[] value(); - EngineType[] type() default {}; - - /** - * Custom reason to provide if the test container is disabled. - * - *
<p>
If a custom reason is supplied, it will be combined with the default reason for this - * annotation. If a custom reason is not supplied, the default reason will be used. - */ - String disabledReason() default ""; -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestCaseInvocationContextProvider.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestCaseInvocationContextProvider.java deleted file mode 100644 index 01f29bf..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestCaseInvocationContextProvider.java +++ /dev/null @@ -1,114 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import com.geedgenetworks.test.common.container.ContainerExtendedFactory; -import com.geedgenetworks.test.common.container.TestContainer; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.extension.*; - -import java.util.Arrays; -import java.util.List; -import java.util.stream.Stream; -import static com.geedgenetworks.test.common.junit.ContainerTestingExtension.TEST_CONTAINERS_STORE_KEY; -import static com.geedgenetworks.test.common.junit.ContainerTestingExtension.TEST_EXTENDED_FACTORY_STORE_KEY; -import static com.geedgenetworks.test.common.junit.ContainerTestingExtension.TEST_RESOURCE_NAMESPACE; -@Slf4j -public class TestCaseInvocationContextProvider implements TestTemplateInvocationContextProvider { - @Override - public boolean supportsTestTemplate(ExtensionContext context) { - // Only support test cases with TestContainer as parameter - Class[] parameterTypes = context.getRequiredTestMethod().getParameterTypes(); - return parameterTypes.length == 1 - && Arrays.stream(parameterTypes).anyMatch(TestContainer.class::isAssignableFrom); - } - - @Override - public Stream provideTestTemplateInvocationContexts( - ExtensionContext context) { - List testContainers = - AnnotationUtil.filterDisabledContainers( - (List) - context.getStore(TEST_RESOURCE_NAMESPACE) - .get(TEST_CONTAINERS_STORE_KEY), - context.getRequiredTestMethod()); - - ContainerExtendedFactory containerExtendedFactory = - (ContainerExtendedFactory) - context.getStore(TEST_RESOURCE_NAMESPACE) - .get(TEST_EXTENDED_FACTORY_STORE_KEY); - - int containerAmount = testContainers.size(); - return testContainers.stream() - .map( - testContainer -> - new TestResourceProvidingInvocationContext( - testContainer, containerExtendedFactory, containerAmount)); - } - static class TestResourceProvidingInvocationContext implements TestTemplateInvocationContext { - private final TestContainer testContainer; - private final ContainerExtendedFactory containerExtendedFactory; - private final Integer containerAmount; - - public TestResourceProvidingInvocationContext( - TestContainer testContainer, - ContainerExtendedFactory containerExtendedFactory, - int containerAmount) { - this.testContainer = testContainer; - this.containerExtendedFactory = containerExtendedFactory; - this.containerAmount = containerAmount; - } - - @Override - public String getDisplayName(int invocationIndex) { - return String.format( - "TestContainer(%s/%s): %s", - invocationIndex, containerAmount, testContainer.identifier()); - } - - @Override - public List getAdditionalExtensions() { - return Arrays.asList( - // Extension for injecting parameters - new TestContainerResolver(testContainer, containerExtendedFactory), - // Extension for closing test container - (AfterTestExecutionCallback) - ignore -> { - testContainer.tearDown(); - log.info( - 
"The TestContainer[{}] is closed.", - testContainer.identifier()); - }); - } - } - - private static class TestContainerResolver implements ParameterResolver { - - private final TestContainer testContainer; - private final ContainerExtendedFactory containerExtendedFactory; - - private TestContainerResolver( - TestContainer testContainer, ContainerExtendedFactory containerExtendedFactory) { - this.testContainer = testContainer; - this.containerExtendedFactory = containerExtendedFactory; - } - - @Override - public boolean supportsParameter( - ParameterContext parameterContext, ExtensionContext extensionContext) - throws ParameterResolutionException { - return TestContainer.class.isAssignableFrom(parameterContext.getParameter().getType()); - } - - @SneakyThrows - @Override - public Object resolveParameter( - ParameterContext parameterContext, ExtensionContext extensionContext) - throws ParameterResolutionException { - testContainer.startUp(); - testContainer.executeExtraCommands(containerExtendedFactory); - log.info("The TestContainer[{}] is running.", testContainer.identifier()); - return this.testContainer; - } - } - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainerExtension.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainerExtension.java deleted file mode 100644 index 0c18003..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainerExtension.java +++ /dev/null @@ -1,12 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Target(ElementType.FIELD) -@Retention(RetentionPolicy.RUNTIME) -public @interface TestContainerExtension { - -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainers.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainers.java deleted file mode 100644 index c7ffa17..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestContainers.java +++ /dev/null @@ -1,11 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Target(ElementType.FIELD) -@Retention(RetentionPolicy.RUNTIME) -public @interface TestContainers { -} diff --git a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestLoggerExtension.java b/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestLoggerExtension.java deleted file mode 100644 index e48135c..0000000 --- a/groot-tests/test-common/src/test/java/com/geedgenetworks/test/common/junit/TestLoggerExtension.java +++ /dev/null @@ -1,60 +0,0 @@ -package com.geedgenetworks.test.common.junit; - -import org.junit.jupiter.api.extension.BeforeEachCallback; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.api.extension.TestWatcher; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.PrintWriter; -import java.io.StringWriter; - -public class TestLoggerExtension implements TestWatcher, BeforeEachCallback { - private static final Logger LOG = LoggerFactory.getLogger(TestLoggerExtension.class); - @Override - public void beforeEach(ExtensionContext context) { 
- LOG.info( - "\n================================================================================" - + "\nTest {}.{} is running." - + "\n--------------------------------------------------------------------------------", - context.getRequiredTestClass().getCanonicalName(), - context.getRequiredTestMethod().getName()); - } - - @Override - public void testSuccessful(ExtensionContext context) { - LOG.info( - "\n--------------------------------------------------------------------------------" - + "\nTest {}.{} successfully run." - + "\n================================================================================", - context.getRequiredTestClass().getCanonicalName(), - context.getRequiredTestMethod().getName()); - } - - @Override - public void testFailed(ExtensionContext context, Throwable cause) { - LOG.error( - "\n--------------------------------------------------------------------------------" - + "\nTest {}.{} failed with:\n{}" - + "\n================================================================================", - context.getRequiredTestClass().getCanonicalName(), - context.getRequiredTestMethod().getName(), - exceptionToString(cause)); - } - - private static String exceptionToString(Throwable t) { - if (t == null) { - return "(null)"; - } - - try { - StringWriter stm = new StringWriter(); - PrintWriter wrt = new PrintWriter(stm); - t.printStackTrace(wrt); - wrt.close(); - return stm.toString(); - } catch (Throwable ignored) { - return t.getClass().getName() + " (error while printing stack trace)"; - } - } -} diff --git a/groot-tests/test-common/src/test/resources/grootstream.yaml b/groot-tests/test-common/src/test/resources/grootstream.yaml deleted file mode 100644 index 0def444..0000000 --- a/groot-tests/test-common/src/test/resources/grootstream.yaml +++ /dev/null @@ -1,14 +0,0 @@ -grootstream: - knowledge_base: - - name: tsg_ip_asn - fs_type: local - fs_path: /tmp/grootstream/config/dat/ - files: - - asn_builtin.mmdb - - name: tsg_ip_location - fs_type: local - fs_path: /tmp/grootstream/config/dat/ - files: - - ip_builtin.mmdb - properties: - scheduler.knowledge_base.update.interval.minutes: 5 diff --git a/groot-tests/test-common/src/test/resources/log4j2.properties b/groot-tests/test-common/src/test/resources/log4j2.properties deleted file mode 100644 index fb3ac1e..0000000 --- a/groot-tests/test-common/src/test/resources/log4j2.properties +++ /dev/null @@ -1,42 +0,0 @@ -################################################################################ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-################################################################################ - -rootLogger.level = INFO - -rootLogger.appenderRef.consoleStdout.ref = consoleStdoutAppender -rootLogger.appenderRef.consoleStderr.ref = consoleStderrAppender - -appender.consoleStdout.name = consoleStdoutAppender -appender.consoleStdout.type = CONSOLE -appender.consoleStdout.target = SYSTEM_OUT -appender.consoleStdout.layout.type = PatternLayout -appender.consoleStdout.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %c [%t] - %m%n -appender.consoleStdout.filter.acceptLtWarn.type = ThresholdFilter -appender.consoleStdout.filter.acceptLtWarn.level = WARN -appender.consoleStdout.filter.acceptLtWarn.onMatch = DENY -appender.consoleStdout.filter.acceptLtWarn.onMismatch = ACCEPT - -appender.consoleStderr.name = consoleStderrAppender -appender.consoleStderr.type = CONSOLE -appender.consoleStderr.target = SYSTEM_ERR -appender.consoleStderr.layout.type = PatternLayout -appender.consoleStderr.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %c [%t] - %m%n -appender.consoleStderr.filter.acceptGteWarn.type = ThresholdFilter -appender.consoleStderr.filter.acceptGteWarn.level = WARN -appender.consoleStderr.filter.acceptGteWarn.onMatch = ACCEPT -appender.consoleStderr.filter.acceptGteWarn.onMismatch = DENY diff --git a/groot-tests/test-e2e-base/pom.xml b/groot-tests/test-e2e-base/pom.xml deleted file mode 100644 index 4a664b9..0000000 --- a/groot-tests/test-e2e-base/pom.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - 4.0.0 - - com.geedgenetworks - groot-tests - ${revision} - - - test-e2e-base - Groot : Tests : E2E : Base - - - - - - - - com.geedgenetworks - test-common - ${project.version} - test-jar - test - - - - - - - org.apache.maven.plugins - maven-jar-plugin - ${maven-jar-plugin.version} - - false - - - - - test-jar - - - - - - - - \ No newline at end of file diff --git a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/EnvParameterIT.java b/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/EnvParameterIT.java deleted file mode 100644 index aa00d8d..0000000 --- a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/EnvParameterIT.java +++ /dev/null @@ -1,198 +0,0 @@ -package com.geedgenetworks.test.e2e.base; - -import com.alibaba.fastjson2.JSON; -import com.alibaba.fastjson2.TypeReference; -import com.alibaba.nacos.client.naming.utils.CollectionUtils; -import com.geedgenetworks.test.common.TestSuiteBase; -import com.geedgenetworks.test.common.container.AbstractTestFlinkContainer; -import com.geedgenetworks.test.common.container.ContainerExtendedFactory; -import com.geedgenetworks.test.common.container.TestContainerId; -import com.geedgenetworks.test.common.junit.DisabledOnContainer; -import com.geedgenetworks.test.common.junit.TestContainerExtension; -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.TestTemplate; -import org.testcontainers.containers.Container; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import static org.awaitility.Awaitility.await; - -@Slf4j -@DisabledOnContainer( - value = {TestContainerId.FLINK_1_17}, - type = {}, - disabledReason = "only flink adjusts the parameter configuration rules") -public class EnvParameterIT extends TestSuiteBase { - @TestContainerExtension - protected final 
ContainerExtendedFactory extendedFactory = - container -> { - Container.ExecResult extraCommands = - container.execInContainer( - "bash", - "-c", - "mkdir -p /tmp/grootstream && chown -R flink /tmp/grootstream"); - Assertions.assertEquals(0, extraCommands.getExitCode(), extraCommands.getStderr()); - }; - - @TestTemplate - public void testGeneralEnvParameter(AbstractTestFlinkContainer container) - throws IOException, InterruptedException { - genericTest( - "/test_env_parameter_inline_to_print.yaml", container); - } - - - public void genericTest(String configPath, AbstractTestFlinkContainer container) - throws IOException, InterruptedException { - CompletableFuture.supplyAsync( - () -> { - try { - return container.executeJob(configPath); - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - // wait obtain job id - AtomicReference jobId = new AtomicReference<>(); - await().atMost(300000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - Map jobInfo = JSON.parseObject(container.executeJobManagerInnerCommand( - "curl http://localhost:8081/jobs/overview"), new TypeReference>() { - }); - List> jobs = - (List>) jobInfo.get("jobs"); - if (!CollectionUtils.isEmpty(jobs)) { - jobId.set(jobs.get(0).get("jid").toString()); - } - Assertions.assertNotNull(jobId.get()); - }); - - // obtain job info - AtomicReference> jobInfoReference = new AtomicReference<>(); - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - Map jobInfo = JSON.parseObject( container.executeJobManagerInnerCommand( - String.format( - "curl http://localhost:8081/jobs/%s", - jobId.get())), new TypeReference>() { - }); - - // wait the job initialization is complete and enters the Running state - if (null != jobInfo && "RUNNING".equals(jobInfo.get("state"))) { - jobInfoReference.set(jobInfo); - } - Assertions.assertNotNull(jobInfoReference.get()); - }); - Map jobInfo = jobInfoReference.get(); - - // obtain execution configuration - Map jobConfig = JSON.parseObject(container.executeJobManagerInnerCommand( - String.format( - "curl http://localhost:8081/jobs/%s/config", jobId.get())), new TypeReference>() { - }); - - Map executionConfig = - (Map) jobConfig.get("execution-config"); - - // obtain checkpoint configuration - Map checkpointConfig = - JSON.parseObject(container.executeJobManagerInnerCommand( - String.format( - "curl http://localhost:8081/jobs/%s/checkpoints/config", jobId.get())), new TypeReference>() { - }); - - // obtain checkpoint storage - AtomicReference> completedCheckpointReference = new AtomicReference<>(); - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - Map checkpointsInfo = - JSON.parseObject(container.executeJobManagerInnerCommand( - String.format( - "curl http://localhost:8081/jobs/%s/checkpoints", jobId.get())), new TypeReference>() { - }); - Map latestCheckpoint = - (Map) checkpointsInfo.get("latest"); - // waiting for at least one checkpoint trigger - if (null != latestCheckpoint) { - completedCheckpointReference.set( - (Map) latestCheckpoint.get("completed")); - Assertions.assertNotNull(completedCheckpointReference.get()); - } - }); - /** - * adjust the configuration of this {@link - * com.geedgenetworks.bootstrap.execution.ExecutionConfigKeyName} to use the 'flink.' 
and the - * flink parameter name, and check whether the configuration takes effect - */ - // PARALLELISM - int parallelism = (int) executionConfig.get("job-parallelism"); - Assertions.assertEquals(1, parallelism); - - // MAX_PARALLELISM - int maxParallelism = (int) jobInfo.get("maxParallelism"); - Assertions.assertEquals(5, maxParallelism); - - // CHECKPOINT_INTERVAL - int interval = (int) checkpointConfig.get("interval"); - Assertions.assertEquals(10000, interval); - - // CHECKPOINT_MODE - String mode = checkpointConfig.get("mode").toString(); - Assertions.assertEquals("exactly_once", mode); - - // CHECKPOINT_TIMEOUT - int checkpointTimeout = (int) checkpointConfig.get("timeout"); - Assertions.assertEquals(1200000, checkpointTimeout); - - // CHECKPOINT_DATA_URI - String externalPath = completedCheckpointReference.get().get("external_path").toString(); - Assertions.assertTrue(externalPath.startsWith("file:/tmp/grootstream/checkpoints")); - - // MAX_CONCURRENT_CHECKPOINTS - int maxConcurrent = (int) checkpointConfig.get("max_concurrent"); - Assertions.assertEquals(2, maxConcurrent); - - // CHECKPOINT_CLEANUP_MODE - Map externalizationMap = - (Map) checkpointConfig.get("externalization"); - boolean externalization = (boolean) externalizationMap.get("delete_on_cancellation"); - Assertions.assertTrue(externalization); - - // MIN_PAUSE_BETWEEN_CHECKPOINTS - int minPause = (int) checkpointConfig.get("min_pause"); - Assertions.assertEquals(100, minPause); - - // FAIL_ON_CHECKPOINTING_ERRORS - int tolerableFailedCheckpoints = (int) checkpointConfig.get("tolerable_failed_checkpoints"); - Assertions.assertEquals(5, tolerableFailedCheckpoints); - - // RESTART_STRATEGY / because the restart strategy is fixed-delay in config file, so don't - // check failure-rate - String restartStrategy = executionConfig.get("restart-strategy").toString(); - Assertions.assertTrue(restartStrategy.contains("fixed delay")); - - // RESTART_ATTEMPTS - Assertions.assertTrue(restartStrategy.contains("2 restart attempts")); - - // RESTART_DELAY_BETWEEN_ATTEMPTS - Assertions.assertTrue(restartStrategy.contains("fixed delay (1000 ms)")); - - // STATE_BACKEND - String stateBackend = checkpointConfig.get("state_backend").toString(); - Assertions.assertTrue(stateBackend.contains("RocksDBStateBackend")); - } - - - -} - diff --git a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/Flink13Container.java b/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/Flink13Container.java deleted file mode 100644 index 43c6eeb..0000000 --- a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/Flink13Container.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.geedgenetworks.test.e2e.base; - -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.TestInstance; -import org.testcontainers.containers.Container; - -import java.io.IOException; - -@Slf4j -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -public class Flink13Container extends com.geedgenetworks.test.common.container.Flink13Container { - @Override - @BeforeAll - public void startUp() throws Exception { - super.startUp(); - log.info("The TestContainer[{}] is running.", identifier()); - } - - @Override - @AfterAll - public void tearDown() throws Exception { - super.tearDown(); - log.info("The TestContainer[{}] is closed.", identifier()); - } - - public Container.ExecResult executeGrootStreamJob(String confFile) - throws IOException, 
InterruptedException { - return executeJob(confFile); - } - - -} diff --git a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/InlineToPrintIT.java b/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/InlineToPrintIT.java deleted file mode 100644 index fdba36f..0000000 --- a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/InlineToPrintIT.java +++ /dev/null @@ -1,153 +0,0 @@ -package com.geedgenetworks.test.e2e.base; - -import com.alibaba.fastjson2.JSON; -import com.alibaba.fastjson2.TypeReference; -import com.alibaba.nacos.client.naming.utils.CollectionUtils; -import com.geedgenetworks.test.common.TestSuiteBase; -import com.geedgenetworks.test.common.container.AbstractTestFlinkContainer; -import com.geedgenetworks.test.common.container.TestContainerId; -import com.geedgenetworks.test.common.junit.DisabledOnContainer; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.TestTemplate; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import static org.awaitility.Awaitility.await; - -@Slf4j -@DisabledOnContainer( - value = {TestContainerId.FLINK_1_17}, - type = {}, - disabledReason = "Only flink adjusts the parameter configuration rules") -public class InlineToPrintIT extends TestSuiteBase { - - - @TestTemplate - public void testJobExecution(AbstractTestFlinkContainer container) throws IOException, InterruptedException { - CompletableFuture.supplyAsync( - () -> { - try { - return container.executeJob("/inline_to_print.yaml"); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - - AtomicReference taskMangerID = new AtomicReference<>(); - - await().atMost(300000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - Map taskMangerInfo = JSON.parseObject(container.executeJobManagerInnerCommand( - "curl http://localhost:8081/taskmanagers"), new TypeReference>() { - }); - - @SuppressWarnings("unchecked") - List> taskManagers = - (List>) taskMangerInfo.get("taskmanagers"); - - if (!CollectionUtils.isEmpty(taskManagers)) { - taskMangerID.set(taskManagers.get(0).get("id").toString()); - } - Assertions.assertNotNull(taskMangerID.get()); - }); - - AtomicReference jobId = new AtomicReference<>(); - await().atMost(300000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - Map jobInfo = JSON.parseObject(container.executeJobManagerInnerCommand( - "curl http://localhost:8081/jobs/overview"), new TypeReference>() { - }); - @SuppressWarnings("unchecked") - List> jobs = - (List>) jobInfo.get("jobs"); - if (!CollectionUtils.isEmpty(jobs)) { - jobId.set(jobs.get(0).get("jid").toString()); - } - Assertions.assertNotNull(jobId.get()); - }); - - //Obtain job metrics - AtomicReference>> jobNumRestartsReference = new AtomicReference<>(); - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - Thread.sleep(5000); - String result = container.executeJobManagerInnerCommand( - String.format( - "curl http://localhost:8081/jobs/%s/metrics?get=numRestarts", jobId.get())); - List> jobNumRestartsInfo = JSON.parseObject(result, new TypeReference>>() { - }); - if (!CollectionUtils.isEmpty(jobNumRestartsInfo)) { - jobNumRestartsReference.set(jobNumRestartsInfo); - } - - Assertions.assertNotNull(jobNumRestartsReference.get()); - - }); - - - } - - 
@TestTemplate - public void testUserDefinedJobVariables(AbstractTestFlinkContainer container) throws IOException, InterruptedException { - - CompletableFuture.supplyAsync( - () -> { - try { - List variables = List.of( - "hos.bucket.name.rtp_file=cli_job_level_traffic_rtp_file_bucket", - "hos.bucket.name.http_file=cli_job_level_traffic_http_file_bucket"); - return container.executeJob("/inline_to_print.yaml", variables); - } catch (Exception e) { - log.error("Commit task exception : {} ", e.getMessage()); - throw new RuntimeException(e); - } - }); - - - await().atMost(300000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - String logs = container.getServerLogs(); - Assertions.assertTrue(StringUtils.countMatches(logs, "cli_job_level_traffic_rtp_file_bucket/test_pcap_file") > 10); - Assertions.assertTrue(StringUtils.countMatches(logs, "cli_job_level_traffic_http_file_bucket/test_http_req_file") > 10); - // Test server_ip filter -> output logs not contains 4.4.4.4 of server_ip - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && !StringUtils.contains(logs, "\"server_ip\":\"4.4.4.4\"")); - // Test Drop function -> output logs not contains 5.5.5.5 of server_ip - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && !StringUtils.contains(logs, "\"server_ip\":\"5.5.5.5\"")); - - // Output logs contains server_asn - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"server_asn\"")); - // Output logs contains server_domain - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"server_domain\"")); - - // Output logs contains server_country - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"server_country\"")); - // Output logs contains mail_attachment_name equals 中文测试 - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"mail_attachment_name\":\"中文测试\"")); - // Test EVAL function -> output logs contains direction equals c2s - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"direction\":\"c2s\"")); - // Test JSON Extract function -> output logs contains device_group equals XXG-TSG-BJ - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"device_group\":\"XXG-TSG-BJ\"")); - - Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "client_ip_list")); - - - - - - }); - - - - } - -} diff --git a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/TestUtils.java b/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/TestUtils.java deleted file mode 100644 index 4aa2dc6..0000000 --- a/groot-tests/test-e2e-base/src/test/java/com/geedgenetworks/test/e2e/base/TestUtils.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.geedgenetworks.test.e2e.base; - -import lombok.extern.slf4j.Slf4j; - -import java.io.File; - -@Slf4j -public class TestUtils { - public static String getResource(String confFile) { - return System.getProperty("user.dir") - + File.separator - + "src" - + File.separator - + "test" - + File.separator - + "resources" - + File.separator - + confFile; - } - -} diff --git a/groot-tests/test-e2e-base/src/test/resources/inline_to_print.yaml 
b/groot-tests/test-e2e-base/src/test/resources/inline_to_print.yaml deleted file mode 100644 index abb42a4..0000000 --- a/groot-tests/test-e2e-base/src/test/resources/inline_to_print.yaml +++ /dev/null @@ -1,232 +0,0 @@ -sources: - inline_source: - type: inline - properties: - data: [{"tcp_rtt_ms":128,"decoded_as":"DNS","rtp_pcap_path":"test_pcap_file", "security_rule_id_list": [1,10,100,300], "http_request_body":"test_http_req_file","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","ssl_sni":"www.ct.cn", "http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"flags":8192, "address_type":4,"mail_subject":"中文标题测试","mail_attachment_name":"5Lit5paH5rWL6K+V","mail_attachment_name_charset": "utf8","device_tag": "{\"tags\":[{\"tag\":\"data_center\",\"value\":\"XXG-TSG-BJ\"},{\"tag\":\"device_group\",\"value\":\"XXG-TSG-BJ\"}]}", "client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.abc.cn","http_url":"www.cabc.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.198","mail_subject":"中文标题测试","server_ip":"4.4.4.4","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","mail_subject":"english subject test","http_request_line":"GET / HTTP/1.1","http_host":"www.5555.com","http_url":"www.5555.com/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.1","server_ip":"5.5.5.5","client_port":42751,"server_port":53,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.6666.cn","http_url":"www.6666.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 
OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","mail_subject":"中文标题测试","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.100.1","server_ip":"6.6.6.6","client_port":42751,"server_port":53,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931}] - format: json - json.ignore.parse.errors: false - -filters: - server_ip_filter: - type: filter - properties: - expression: event.server_ip != '4.4.4.4' - -splits: - decoded_as_split: - type: split - rules: - - tag: http_tag - expression: event.decoded_as == 'HTTP' - - tag: dns_tag - expression: event.decoded_as == 'DNS' - - -processing_pipelines: - projection_processor: - type: projection - remove_fields: [http_request_line, http_response_line, http_response_content_type] - functions: - - - function: DROP - filter: event.server_ip == '5.5.5.5' - - - function: SNOWFLAKE_ID - output_fields: [ log_id ] - parameters: - data_center_id_num: 1 - - - function: UUID - output_fields: [ log_uuid ] - - - function: UUIDv5 - lookup_fields: [ client_ip, server_ip ] - output_fields: [ ip_uuid ] - parameters: - namespace: NAMESPACE_IP - - function: UUIDv7 - output_fields: [ log_uuid_v7 ] - - - function: ASN_LOOKUP - lookup_fields: [ server_ip ] - output_fields: [ server_asn ] - parameters: - kb_name: tsg_ip_asn - option: IP_TO_ASN - - - function: GEOIP_LOOKUP - lookup_fields: [ server_ip ] - output_fields: [ ] - parameters: - kb_name: tsg_ip_location - option: IP_TO_OBJECT - geolocation_field_mapping: - COUNTRY: server_country - PROVINCE: server_super_administrative_area - CITY: server_administrative_area - LONGITUDE: server_longitude - LATITUDE: server_latitude - ISP: server_isp - ORGANIZATION: server_organization - - - function : BASE64_ENCODE_TO_STRING - lookup_fields: [ mail_subject ] - output_fields: [ mail_subject_base64 ] - parameters: - input_type: string - - - function: BASE64_DECODE_TO_STRING - output_fields: [ mail_attachment_name ] - parameters: - value_field: mail_attachment_name - charset_field: mail_attachment_name_charset - - function: CURRENT_UNIX_TIMESTAMP - output_fields: [ current_unix_timestamp_ms ] - parameters: - precision: milliseconds - - - function: DOMAIN - lookup_fields: [ http_host, ssl_sni, quic_sni ] - output_fields: [ server_domain ] - parameters: - option: FIRST_SIGNIFICANT_SUBDOMAIN - - - function: EVAL - output_fields: [ recv_time ] - parameters: - value_expression: current_unix_timestamp_ms - - - function: EVAL - output_fields: [ direction ] - parameters: - value_expression: "(flags & 24576) == 24576 ? 'double' : ((flags & 8192) == 8192 ? 'c2s' : ((flags & 16384) == 16384 ? 
's2c' : 'unknown'))" - - - function: EVAL - output_fields: [ constant_value ] - parameters: - value_expression: "'abc'" - - - function: JSON_EXTRACT - lookup_fields: [ device_tag ] - output_fields: [ device_group ] - parameters: - value_expression: $.tags[?(@.tag=='device_group')][0].value - - - function: FLATTEN - lookup_fields: [ device_tag ] - parameters: - prefix: olap - json_string_keys: [device_tag] - - - function: FROM_UNIX_TIMESTAMP - lookup_fields: [ current_unix_timestamp_ms ] - output_fields: [ current_time_str ] - parameters: - precision: milliseconds - - - function: GENERATE_STRING_ARRAY - lookup_fields: [server_ip, server_port] - output_fields: [server_ip_port] - - - function: PATH_COMBINE - lookup_fields: [ rtp_pcap_path ] - output_fields: [ rtp_pcap_path ] - parameters: - path: [ props.hos.path, props.hos.bucket.name.rtp_file, rtp_pcap_path ] - - - function: PATH_COMBINE - lookup_fields: [ http_request_body ] - output_fields: [ http_request_body ] - parameters: - path: [ props.hos.path, props.hos.bucket.name.http_file, http_request_body ] - - - function: RENAME - parameters: - rename_fields: - current_unix_timestamp_ms: processing_time_ms - rename_expression: key = string.replace_all(key,'olap.device_tag.tags','device_tags'); return key; - - - function: UNIX_TIMESTAMP_CONVERTER - lookup_fields: [ __timestamp ] - output_fields: [stat_time_minute] - parameters: - precision: minutes - - dns_table_processor: - type: table - functions: - - function: UNROLL - lookup_fields: [ security_rule_id_list ] - output_fields: [ security_rule_id ] - - dns_aggregate_processor: - type: aggregate - group_by_fields: [ decoded_as ] - window_type: tumbling_processing_time - window_size: 5 - functions: - - function: LONG_COUNT - output_fields: [ count ] - - function: COLLECT_LIST - lookup_fields: [ client_ip ] - output_fields: [ client_ip_list ] - - - -sinks: - global_print_sink: - type: print - properties: - format: json - mode: log_warn - dns_print_sink: - type: print - properties: - format: json - mode: log_warn - http_print_sink: - type: print - properties: - format: json - mode: log_warn - - -application: - env: - name: example-inline-to-print - parallelism: 1 - pipeline: - object-reuse: true - - properties: - hos.path: http://192.168.44.12:9098/hos - hos.bucket.name.troubleshooting_file: troubleshooting_file_bucket - hos.bucket.name.rtp_file: job_level_traffic_rtp_file_bucket - hos.bucket.name.http_file: job_level_traffic_http_file_bucket - hos.bucket.name.eml_file: job_level_traffic_eml_file_bucket - hos.bucket.name.policy_capture_file: job_level_traffic_policy_capture_file_bucket - - topology: - - name: inline_source - downstream: [server_ip_filter] - - name: server_ip_filter - downstream: [ projection_processor ] - - name: projection_processor - downstream: [ global_print_sink, decoded_as_split ] - parallelism: 2 - - name: decoded_as_split - tags: [http_tag, dns_tag] - downstream: [ http_print_sink, dns_table_processor ] - parallelism: 2 - - name: dns_table_processor - downstream: [ dns_aggregate_processor ] - parallelism: 2 - - name: dns_aggregate_processor - downstream: [ dns_print_sink ] - parallelism: 2 - - name: global_print_sink - downstream: [] - - name: http_print_sink - downstream: [] - - name: dns_print_sink - downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-base/src/test/resources/kafka_to_print.yaml b/groot-tests/test-e2e-base/src/test/resources/kafka_to_print.yaml deleted file mode 100644 index b1e4f35..0000000 --- 
a/groot-tests/test-e2e-base/src/test/resources/kafka_to_print.yaml +++ /dev/null @@ -1,40 +0,0 @@ -sources: - kafka_source: - type : kafka - schema: - fields: # [array of object] Schema field projection, support read data only from specified fields. - - name: client_ip - type: string - - name: server_ip - type: string - properties: # [object] Kafka source properties - topic: SESSION-RECORD - kafka.bootstrap.servers: 192.168.44.11:9092 - kafka.session.timeout.ms: 60000 - kafka.max.poll.records: 3000 - kafka.max.partition.fetch.bytes: 31457280 - kafka.group.id: GROOT-STREAM-EXAMPLE-KAFKA-TO-PRINT - kafka.auto.offset.reset: latest - format: json - -sinks: # [object] Define connector sink - print_sink: - type: print - properties: - mode: log_info - format: json - -application: # [object] Define job configuration - env: - name: example-kafka-to-print - parallelism: 1 - pipeline: - object-reuse: true - execution: - restart: - strategy: no - topology: - - name: kafka_source - downstream: [print_sink] - - name: print_sink - downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-base/src/test/resources/test_env_parameter_inline_to_print.yaml b/groot-tests/test-e2e-base/src/test/resources/test_env_parameter_inline_to_print.yaml deleted file mode 100644 index 1d09282..0000000 --- a/groot-tests/test-e2e-base/src/test/resources/test_env_parameter_inline_to_print.yaml +++ /dev/null @@ -1,47 +0,0 @@ -sources: - inline_source: - type: inline - properties: - data: '[{"tcp_rtt_ms":128,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.abc.cn","http_url":"www.cabc.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.198","server_ip":"4.4.4.4","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931}]' - format: json - json.ignore.parse.errors: false - -sinks: - print_sink: - type: print - properties: - format: json - mode: log_warn - -application: - env: - name: example-inline-to-print - parallelism: 1 - execution: - runtime-mode: streaming - buffer-timeout: 10 - checkpointing: - interval: 10000 - mode: exactly_once - timeout: 1200000 
- data-uri: file:///tmp/grootstream/checkpoints - max-concurrent-checkpoints: 2 - cleanup: true - min-pause: 100 - tolerable-failed-checkpoints: 5 - restart: - strategy: fixed-delay - attempts: 2 - delayBetweenAttempts: 1000 - state: - backend: rocksdb - flink: - pipeline: - max-parallelism: 5 - pipeline: - object-reuse: true - topology: - - name: inline_source - downstream: [print_sink] - - name: print_sink - downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-clickhouse/pom.xml b/groot-tests/test-e2e-clickhouse/pom.xml deleted file mode 100644 index d575f15..0000000 --- a/groot-tests/test-e2e-clickhouse/pom.xml +++ /dev/null @@ -1,88 +0,0 @@ - - - 4.0.0 - - com.geedgenetworks - groot-tests - ${revision} - - - test-e2e-clickhouse - Groot : Tests : E2E : ClickHouse - - - 11 - 11 - UTF-8 - 0.6.3 - 4.0.3 - 5.2.1 - - - - - - com.geedgenetworks - test-common - ${project.version} - test-jar - test - - - - org.testcontainers - clickhouse - ${testcontainer.version} - test - - - - com.zaxxer - HikariCP - ${hikaricp.version} - test - - - - org.lz4 - lz4-java - 1.8.0 - test - - - - - com.clickhouse - clickhouse-jdbc - ${clickhouse.jdbc.version} - test - - - - - org.apache.httpcomponents.client5 - httpclient5 - ${apache-httpclient.version} - test - - - - com.geedgenetworks - connector-clickhouse - ${project.version} - test - - - - - org.xerial.snappy - snappy-java - test - - - - - - \ No newline at end of file diff --git a/groot-tests/test-e2e-clickhouse/src/test/java/com/geedgenetworks/test/e2e/clickhouse/ClickHouseIT.java b/groot-tests/test-e2e-clickhouse/src/test/java/com/geedgenetworks/test/e2e/clickhouse/ClickHouseIT.java deleted file mode 100644 index 8b44ed7..0000000 --- a/groot-tests/test-e2e-clickhouse/src/test/java/com/geedgenetworks/test/e2e/clickhouse/ClickHouseIT.java +++ /dev/null @@ -1,353 +0,0 @@ -package com.geedgenetworks.test.e2e.clickhouse; - -import com.alibaba.fastjson2.JSON; -import com.geedgenetworks.test.common.TestResource; -import com.geedgenetworks.test.common.TestSuiteBase; -import com.geedgenetworks.test.common.container.ContainerUtil; -import com.geedgenetworks.test.common.container.TestContainer; -import com.geedgenetworks.test.common.container.TestContainerId; -import com.geedgenetworks.test.common.junit.DisabledOnContainer; -import com.google.common.collect.Maps; -import com.typesafe.config.Config; -import com.typesafe.config.ConfigFactory; -import lombok.extern.slf4j.Slf4j; -import org.awaitility.Awaitility; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.TestTemplate; -import org.testcontainers.containers.ClickHouseContainer; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testcontainers.lifecycle.Startables; -import org.testcontainers.shaded.org.apache.commons.io.IOUtils; -import org.testcontainers.utility.DockerLoggerFactory; -import org.testcontainers.utility.MountableFile; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.math.BigDecimal; -import java.nio.charset.StandardCharsets; -import java.sql.*; -import java.util.*; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static org.awaitility.Awaitility.await; - -@Slf4j -@DisabledOnContainer( - value = {TestContainerId.FLINK_1_17}, - disabledReason = 
"Override TestSuiteBase @DisabledOnContainer") -public class ClickHouseIT extends TestSuiteBase implements TestResource { - private static final String CLICKHOUSE_DOCKER_IMAGE = "clickhouse/clickhouse-server:23.3.19.32"; - private static final String DRIVER_CLASS = "com.clickhouse.jdbc.ClickHouseDriver"; - private static final String INIT_CLICKHOUSE_PATH = "/init/clickhouse_test_sql.conf"; - private static final String DATABASE = "default"; - private static final String SOURCE_TABLE = "source_table"; - private static final String SINK_TABLE = "sink_table"; - private static final String INSERT_SQL = "insert_sql"; - private static final String COMPARE_SQL = "compare_sql"; - private static final String HOST = "clickhouse"; - private static final Config CONFIG = getInitClickhouseConfig(); - private ClickHouseContainer clickHouseContainer; - private Connection connection; - private static final String[] default_columns = new String[] { - "id", - "c_array_string", - "c_array_short", - "c_array_int", - "c_array_long", - "c_array_float", - "c_array_double", - "c_string", - "c_int8", - "c_int16", - "c_int32", - "c_int64", - "c_float32", - "c_float64", - "c_decimal", - "c_nullable", - "c_lowcardinality" - }; - private static final List>TEST_DATASET = generateTestDataSet(); - - - @BeforeAll - @Override - public void startUp() throws Exception { - this.clickHouseContainer = - new ClickHouseContainer(CLICKHOUSE_DOCKER_IMAGE) - .withNetwork(NETWORK) - .withNetworkAliases(HOST) - .withCopyFileToContainer(MountableFile.forClasspathResource("init/users.xml"), "/etc/clickhouse-server/users.xml") - .withCopyFileToContainer(MountableFile.forClasspathResource("init/init-clickhouse.sql"), "/docker-entrypoint-initdb.d/init-clickhouse.sql") - .withLogConsumer( - new Slf4jLogConsumer( - DockerLoggerFactory.getLogger(CLICKHOUSE_DOCKER_IMAGE))); - - Startables.deepStart(Stream.of(this.clickHouseContainer)).join(); - System.out.println("Clickhouse JDBC URL: " + this.clickHouseContainer.getJdbcUrl()); - System.out.println("Clickhouse username: " + this.clickHouseContainer.getUsername()); - System.out.println("Clickhouse password: " + this.clickHouseContainer.getPassword()); - - log.info("Clickhouse container started"); - Awaitility.given() - .ignoreExceptions() - .await() - .atMost(360L, TimeUnit.SECONDS) - .untilAsserted(this::initConnection); - this.initializeClickhouseTable(); - this.batchInsertData(); - log.info(JSON.toJSONString(TEST_DATASET)); - - } - - private void initConnection() - throws SQLException, ClassNotFoundException, InstantiationException, - IllegalAccessException { - final Properties info = new Properties(); - info.put("user", this.clickHouseContainer.getUsername()); - info.put("password", this.clickHouseContainer.getPassword()); - this.connection = - ((Driver) Class.forName(DRIVER_CLASS).newInstance()) - .connect(this.clickHouseContainer.getJdbcUrl(), info); - - } - - @TestTemplate - public void testClickHouse(TestContainer container) throws Exception { - assertHasData(SOURCE_TABLE); - } - - @TestTemplate - public void testClickHouseDataTypeSinkTable(TestContainer container) throws Exception { - CompletableFuture.supplyAsync( - () -> { - try { - Container.ExecResult execResult = container.executeJob("/clickhouse_data_type_sink.yaml"); - Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); - return execResult; - } catch (Exception e) { - log.error("Commit task exception:" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - await().atMost(300000, 
TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - assertHasData(SINK_TABLE); - compareResult(); - }); - } - - - private void assertHasData(String table) { - String sql = String.format("select * from %s.%s limit 1", DATABASE, table); - try (Statement statement = connection.createStatement(); - ResultSet source = statement.executeQuery(sql);) { - Assertions.assertTrue(source.next()); - } catch (SQLException e) { - throw new RuntimeException("test clickhouse server image error", e); - } - } - - private void clearTable(String table) { - try (Statement statement = connection.createStatement()) { - statement.execute(String.format("truncate table %s.%s", DATABASE, table)); - } catch (SQLException e) { - throw new RuntimeException("Test clickhouse server image error", e); - } - } - - private void compareResult() throws SQLException, IOException { - String sourceSql = "select * from " + SOURCE_TABLE + " order by id "; - String sinkSql = "select * from " + SINK_TABLE + " order by id"; - try (Statement sourceStatement = connection.createStatement(); - Statement sinkStatement = connection.createStatement(); - ResultSet sourceResultSet = sourceStatement.executeQuery(sourceSql); - ResultSet sinkResultSet = sinkStatement.executeQuery(sinkSql)) { - Assertions.assertEquals( - sourceResultSet.getMetaData().getColumnCount(), - sinkResultSet.getMetaData().getColumnCount()); - String columns = String.join(",", default_columns); - Assertions.assertTrue( - compare(String.format(CONFIG.getString(COMPARE_SQL), columns, columns))); - } - - sourceSql = "select count(distinct id) as count from " + SOURCE_TABLE; - sinkSql = "select count(distinct id) as count from " + SINK_TABLE; - - try (Statement sourceStatement = connection.createStatement(); - Statement sinkStatement = connection.createStatement(); - ResultSet sourceResultSet = sourceStatement.executeQuery(sourceSql); - ResultSet sinkResultSet = sinkStatement.executeQuery(sinkSql)) { - while (sourceResultSet.next()) { - if (sinkResultSet.next()) { - int sinkUniqueIds = sinkResultSet.getInt("count"); - int sourceUniqueIds = sourceResultSet.getInt("count"); - Assertions.assertEquals(sinkUniqueIds, sourceUniqueIds); - } - } - } - - - } - - private Boolean compare(String sql) { - try (Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(sql);) { - return !resultSet.next(); - } catch (SQLException e) { - throw new RuntimeException("result compare error", e); - } - } - - - private void batchInsertData() { - String sql = CONFIG.getString(INSERT_SQL); - PreparedStatement preparedStatement = null; - try { - this.connection.setAutoCommit(true); - preparedStatement = this.connection.prepareStatement(sql); - for (Map row : TEST_DATASET) { - preparedStatement.setLong(1, (Long) row.get(default_columns[0])); - preparedStatement.setArray(2, toSqlArray(row.get(default_columns[1]))); - preparedStatement.setArray(3, toSqlArray(row.get(default_columns[2]))); - preparedStatement.setArray(4, toSqlArray(row.get(default_columns[3]))); - preparedStatement.setArray(5, toSqlArray(row.get(default_columns[4]))); - preparedStatement.setArray(6, toSqlArray(row.get(default_columns[5]))); - preparedStatement.setArray(7, toSqlArray(row.get(default_columns[6]))); - preparedStatement.setString(8, (String) row.get(default_columns[7])); - preparedStatement.setByte(9, (Byte) row.get(default_columns[8])); - preparedStatement.setShort(10, (Short) row.get(default_columns[9])); - preparedStatement.setInt(11, (Integer) row.get(default_columns[10])); - 
preparedStatement.setLong(12, (Long) row.get(default_columns[11])); - preparedStatement.setFloat(13, (Float) row.get(default_columns[12])); - preparedStatement.setDouble(14, (Double) row.get(default_columns[13])); - preparedStatement.setBigDecimal(15, (BigDecimal) row.get(default_columns[14])); - preparedStatement.setInt(16, (Integer) row.get(default_columns[15])); - preparedStatement.setString(17, (String) row.get(default_columns[16])); - preparedStatement.addBatch(); - } - - preparedStatement.executeBatch(); - preparedStatement.clearBatch(); - - } catch (SQLException e) { - throw new RuntimeException("Batch insert data failed!", e); - } finally { - if (preparedStatement != null) { - try { - preparedStatement.close(); - } catch (SQLException e) { - throw new RuntimeException("PreparedStatement close failed!", e); - } - } - } - - - - } - - - private Array toSqlArray(Object value) throws SQLException { - Object[] elements = null; - String sqlType = null; - if (String[].class.equals(value.getClass())) { - sqlType = "TEXT"; - elements = (String[]) value; - } else if (Boolean[].class.equals(value.getClass())) { - sqlType = "BOOLEAN"; - elements = (Boolean[]) value; - } else if (Byte[].class.equals(value.getClass())) { - sqlType = "TINYINT"; - elements = (Byte[]) value; - } else if (Short[].class.equals(value.getClass())) { - sqlType = "SMALLINT"; - elements = (Short[]) value; - } else if (Integer[].class.equals(value.getClass())) { - sqlType = "INTEGER"; - elements = (Integer[]) value; - } else if (Long[].class.equals(value.getClass())) { - sqlType = "BIGINT"; - elements = (Long[]) value; - } else if (Float[].class.equals(value.getClass())) { - sqlType = "REAL"; - elements = (Float[]) value; - } else if (Double[].class.equals(value.getClass())) { - sqlType = "DOUBLE"; - elements = (Double[]) value; - } - if (sqlType == null) { - throw new IllegalArgumentException( - "array inject error, not supported data type: " + value.getClass()); - } - return connection.createArrayOf(sqlType, elements); - } - - private static List> generateTestDataSet() { - List> rows = new ArrayList<>(); - for (int i = 0; i < 100; ++i) { - Map row = Maps.newLinkedHashMap(); - row.put(default_columns[0], (long) i); - row.put(default_columns[1], new String[] {"string"}); - row.put(default_columns[2], new Short[] {Short.parseShort("1")}); - row.put(default_columns[3], new Integer[] {Integer.parseInt("1")}); - row.put(default_columns[4], new Long[] {Long.parseLong("1")}); - row.put(default_columns[5], new Float[] {Float.parseFloat("1.1")}); - row.put(default_columns[6], new Double[] {Double.parseDouble("1.1")}); - row.put(default_columns[7], "string"); - row.put(default_columns[8], Byte.parseByte("1")); - row.put(default_columns[9], Short.parseShort("1")); - row.put(default_columns[10], Integer.parseInt("1")); - row.put(default_columns[11], Long.parseLong("1")); - row.put(default_columns[12], Float.parseFloat("1.1")); - row.put(default_columns[13], Double.parseDouble("1.1")); - row.put(default_columns[14], BigDecimal.valueOf(11L, 1)); - row.put(default_columns[15], i); - row.put(default_columns[16], "string"); - rows.add(row); - } - return rows; - } - - - - private void initializeClickhouseTable() { - try { - Statement statement = this.connection.createStatement(); - statement.execute(CONFIG.getString(SOURCE_TABLE)); - statement.execute(CONFIG.getString(SINK_TABLE)); - } catch (SQLException e) { - throw new RuntimeException("Initializing Clickhouse table failed!", e); - } - } - - private static Config 
getInitClickhouseConfig() { - File file = ContainerUtil.getResourcesFile(INIT_CLICKHOUSE_PATH); - Config config = ConfigFactory.parseFile(file); - assert config.hasPath(SOURCE_TABLE) - && config.hasPath(SINK_TABLE) - && config.hasPath(INSERT_SQL) - && config.hasPath(COMPARE_SQL); - return config; - } - - @AfterAll - @Override - public void tearDown() throws Exception { - if (this.connection != null) { - this.connection.close(); - } - if (this.clickHouseContainer != null) { - this.clickHouseContainer.stop(); - } - - } -} diff --git a/groot-tests/test-e2e-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml b/groot-tests/test-e2e-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml deleted file mode 100644 index 3406a67..0000000 --- a/groot-tests/test-e2e-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml +++ /dev/null @@ -1,79 +0,0 @@ -sources: - inline_source: - type: inline - schema: - fields: - - name: id - type: bigint - - name: c_array_string - type: array - - name: c_array_short - type: array - - name: c_array_int - type: array - - name: c_array_long - type: array - - name: c_array_float - type: array - - name: c_array_double - type: array - - name: c_string - type: string - - name: c_int8 - type: int - - name: c_int16 - type: int - - name: c_int32 - type: int - - name: c_int64 - type: int - - name: c_float32 - type: float - - name: c_float64 - type: double - - name: c_decimal - type: double - - name: c_date - type: string - - name: c_datetime - type: string - - name: c_nullable - type: int - - name: c_lowcardinality - type: string - properties: - # - # [string] Event Data, it will be parsed to Map by the specified format. - # - data: '[{"id":0,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":0,"c_lowcardinality":"string"},{"id":1,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":1,"c_lowcardinality":"string"},{"id":2,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":2,"c_lowcardinality":"string"},{"id":3,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":3,"c_lowcardinality":"string"},{"id":4,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":4,"c_lowcardinality":"string"},{"id":5,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":5,"c_lowcardinality":"string"},{"id":6,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_
array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":6,"c_lowcardinality":"string"},{"id":7,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":7,"c_lowcardinality":"string"},{"id":8,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":8,"c_lowcardinality":"string"},{"id":9,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":9,"c_lowcardinality":"string"},{"id":10,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":10,"c_lowcardinality":"string"},{"id":11,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":11,"c_lowcardinality":"string"},{"id":12,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":12,"c_lowcardinality":"string"},{"id":13,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":13,"c_lowcardinality":"string"},{"id":14,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":14,"c_lowcardinality":"string"},{"id":15,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":15,"c_lowcardinality":"string"},{"id":16,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":16,"c_lowcardinality":"string"},{"id":17,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":17,"c_lowcardinality":"string"},{"id":18,"c_array_string":["string"],"c_array_short"
:[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":18,"c_lowcardinality":"string"},{"id":19,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":19,"c_lowcardinality":"string"},{"id":20,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":20,"c_lowcardinality":"string"},{"id":21,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":21,"c_lowcardinality":"string"},{"id":22,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":22,"c_lowcardinality":"string"},{"id":23,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":23,"c_lowcardinality":"string"},{"id":24,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":24,"c_lowcardinality":"string"},{"id":25,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":25,"c_lowcardinality":"string"},{"id":26,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":26,"c_lowcardinality":"string"},{"id":27,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":27,"c_lowcardinality":"string"},{"id":28,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":28,"c_lowcardinality":"string"},{"id":29,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":29,"c_lowcardinality":"string"},{"id":30,"c_array_s
tring":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":30,"c_lowcardinality":"string"},{"id":31,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":31,"c_lowcardinality":"string"},{"id":32,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":32,"c_lowcardinality":"string"},{"id":33,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":33,"c_lowcardinality":"string"},{"id":34,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":34,"c_lowcardinality":"string"},{"id":35,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":35,"c_lowcardinality":"string"},{"id":36,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":36,"c_lowcardinality":"string"},{"id":37,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":37,"c_lowcardinality":"string"},{"id":38,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":38,"c_lowcardinality":"string"},{"id":39,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":39,"c_lowcardinality":"string"},{"id":40,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":40,"c_lowcardinality":"string"},{"id":41,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":41,"c_lowcardinali
ty":"string"},{"id":42,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":42,"c_lowcardinality":"string"},{"id":43,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":43,"c_lowcardinality":"string"},{"id":44,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":44,"c_lowcardinality":"string"},{"id":45,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":45,"c_lowcardinality":"string"},{"id":46,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":46,"c_lowcardinality":"string"},{"id":47,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":47,"c_lowcardinality":"string"},{"id":48,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":48,"c_lowcardinality":"string"},{"id":49,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":49,"c_lowcardinality":"string"},{"id":50,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":50,"c_lowcardinality":"string"},{"id":51,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":51,"c_lowcardinality":"string"},{"id":52,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":52,"c_lowcardinality":"string"},{"id":53,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.
1,"c_nullable":53,"c_lowcardinality":"string"},{"id":54,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":54,"c_lowcardinality":"string"},{"id":55,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":55,"c_lowcardinality":"string"},{"id":56,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":56,"c_lowcardinality":"string"},{"id":57,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":57,"c_lowcardinality":"string"},{"id":58,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":58,"c_lowcardinality":"string"},{"id":59,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":59,"c_lowcardinality":"string"},{"id":60,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":60,"c_lowcardinality":"string"},{"id":61,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":61,"c_lowcardinality":"string"},{"id":62,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":62,"c_lowcardinality":"string"},{"id":63,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":63,"c_lowcardinality":"string"},{"id":64,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":64,"c_lowcardinality":"string"},{"id":65,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1
.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":65,"c_lowcardinality":"string"},{"id":66,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":66,"c_lowcardinality":"string"},{"id":67,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":67,"c_lowcardinality":"string"},{"id":68,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":68,"c_lowcardinality":"string"},{"id":69,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":69,"c_lowcardinality":"string"},{"id":70,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":70,"c_lowcardinality":"string"},{"id":71,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":71,"c_lowcardinality":"string"},{"id":72,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":72,"c_lowcardinality":"string"},{"id":73,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":73,"c_lowcardinality":"string"},{"id":74,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":74,"c_lowcardinality":"string"},{"id":75,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":75,"c_lowcardinality":"string"},{"id":76,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":76,"c_lowcardinality":"string"},{"id":77,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_i
nt32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":77,"c_lowcardinality":"string"},{"id":78,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":78,"c_lowcardinality":"string"},{"id":79,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":79,"c_lowcardinality":"string"},{"id":80,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":80,"c_lowcardinality":"string"},{"id":81,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":81,"c_lowcardinality":"string"},{"id":82,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":82,"c_lowcardinality":"string"},{"id":83,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":83,"c_lowcardinality":"string"},{"id":84,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":84,"c_lowcardinality":"string"},{"id":85,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":85,"c_lowcardinality":"string"},{"id":86,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":86,"c_lowcardinality":"string"},{"id":87,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":87,"c_lowcardinality":"string"},{"id":88,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":88,"c_lowcardinality":"string"},{"id":89,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"st
ring","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":89,"c_lowcardinality":"string"},{"id":90,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":90,"c_lowcardinality":"string"},{"id":91,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":91,"c_lowcardinality":"string"},{"id":92,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":92,"c_lowcardinality":"string"},{"id":93,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":93,"c_lowcardinality":"string"},{"id":94,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":94,"c_lowcardinality":"string"},{"id":95,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":95,"c_lowcardinality":"string"},{"id":96,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":96,"c_lowcardinality":"string"},{"id":97,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":97,"c_lowcardinality":"string"},{"id":98,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":98,"c_lowcardinality":"string"},{"id":99,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":99,"c_lowcardinality":"string"}]' - format: json - interval.per.row: 10ms - repeat.count: -1 - json.ignore.parse.errors: false - - -sinks: - clickhouse_sink: - type: clickhouse - properties: - host: clickhouse:9000 - table: sink_table - connection.database: default - batch.size: 100 - batch.byte.size: 200MB - batch.interval: 1s - connection.user: ee9b0016824d59c8c191aa9633e4b61e - connection.password: ee9b0016824d59c8c191aa9633e4b61e - 
-application: # [object] Define job configuration - env: - name: example-inline-to-clickhouse - parallelism: 1 - shade.identifier: aes - pipeline: - object-reuse: true - topology: - - name: inline_source - downstream: [ clickhouse_sink ] - - name: clickhouse_sink - downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-clickhouse/src/test/resources/init/clickhouse_test_sql.conf b/groot-tests/test-e2e-clickhouse/src/test/resources/init/clickhouse_test_sql.conf deleted file mode 100644 index f132795..0000000 --- a/groot-tests/test-e2e-clickhouse/src/test/resources/init/clickhouse_test_sql.conf +++ /dev/null @@ -1,81 +0,0 @@ -source_table = """ -set allow_experimental_geo_types = 1; -create table if not exists `default`.source_table( - `id` Int64, - `c_array_string` Array(String), - `c_array_short` Array(Int16), - `c_array_int` Array(Int32), - `c_array_long` Array(Int64), - `c_array_float` Array(Float32), - `c_array_double` Array(Float64), - `c_string` String, - `c_int8` Int8, - `c_int16` Int16, - `c_int32` Int32, - `c_int64` Int64, - `c_float32` Float32, - `c_float64` Float64, - `c_decimal` Decimal(9,4), - `c_nullable` Nullable(Int32), - `c_lowcardinality` LowCardinality(String) -)engine=Memory; -""" - -sink_table = """ -create table if not exists `default`.sink_table( - `id` Int64, - `c_array_string` Array(String), - `c_array_short` Array(Int16), - `c_array_int` Array(Int32), - `c_array_long` Array(Int64), - `c_array_float` Array(Float32), - `c_array_double` Array(Float64), - `c_string` String, - `c_int8` Int8, - `c_int16` Int16, - `c_int32` Int32, - `c_int64` Int64, - `c_float32` Float32, - `c_float64` Float64, - `c_decimal` Decimal(9,4), - `c_nullable` Nullable(Int32), - `c_lowcardinality` LowCardinality(String) -)engine=Memory; -""" - -insert_sql = """ -insert into `default`.source_table -( - `id`, - `c_array_string`, - `c_array_short`, - `c_array_int`, - `c_array_long`, - `c_array_float`, - `c_array_double`, - `c_string`, - `c_int8`, - `c_int16`, - `c_int32`, - `c_int64`, - `c_float32`, - `c_float64`, - `c_decimal`, - `c_nullable`, - `c_lowcardinality` -) -values -(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) 
-""" - -compare_sql = """ -select - %s - from ( - select * from default.source_table -union all - select * from default.sink_table - ) -group by %s -having count(*) < 2 -""" \ No newline at end of file diff --git a/groot-tests/test-e2e-clickhouse/src/test/resources/init/init-clickhouse.sql b/groot-tests/test-e2e-clickhouse/src/test/resources/init/init-clickhouse.sql deleted file mode 100644 index fd9daac..0000000 --- a/groot-tests/test-e2e-clickhouse/src/test/resources/init/init-clickhouse.sql +++ /dev/null @@ -1,4 +0,0 @@ -show databases; --- ALTER USER default IDENTIFIED WITH plaintext_password BY 'testuser'; -CREATE USER testuser IDENTIFIED WITH plaintext_password BY 'testuser'; -GRANT ALL ON *.* TO testuser; \ No newline at end of file diff --git a/groot-tests/test-e2e-clickhouse/src/test/resources/init/users.xml b/groot-tests/test-e2e-clickhouse/src/test/resources/init/users.xml deleted file mode 100644 index 86a590d..0000000 --- a/groot-tests/test-e2e-clickhouse/src/test/resources/init/users.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - - - default - 1 - 1 - 1 - 1 - - ::/0 - - - ALTER TABLE ON *.* - CREATE USER ON *.* - GRANT ON *.* - - - - - - - 10000000000 - 1 - random - 8 - - - diff --git a/groot-tests/test-e2e-common/pom.xml b/groot-tests/test-e2e-common/pom.xml new file mode 100644 index 0000000..be0de94 --- /dev/null +++ b/groot-tests/test-e2e-common/pom.xml @@ -0,0 +1,45 @@ + + + 4.0.0 + + com.geedgenetworks + groot-tests + ${revision} + + + test-e2e-common + Groot : Tests : E2E: Common + + + + + + + + + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + false + + + + + test-jar + + + + + + + + + \ No newline at end of file diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/AbstractFlinkContainer.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/AbstractFlinkContainer.java new file mode 100644 index 0000000..a44c65a --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/AbstractFlinkContainer.java @@ -0,0 +1,37 @@ +package com.geedgenetworks.test.e2e.common; + + +import com.geedgenetworks.test.e2e.common.container.AbstractTestFlinkContainer; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.TestInstance; +import org.testcontainers.containers.Container; + +import java.io.IOException; + +@Slf4j +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public abstract class AbstractFlinkContainer extends AbstractTestFlinkContainer { + @Override + @BeforeAll + public void startUp() throws Exception { + super.startUp(); + log.info("The TestContainer[{}] is running.", identifier()); + } + + @Override + @AfterAll + public void tearDown() throws Exception { + super.tearDown(); + log.info("The TestContainer[{}] is closed.", identifier()); + } + + + public Container.ExecResult executeGrootStreamFlinkJob(String confFile) + throws IOException, InterruptedException { + return executeJob(confFile); + } + + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestResource.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestResource.java new file mode 100644 index 0000000..fc04588 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestResource.java @@ -0,0 +1,27 @@ +package com.geedgenetworks.test.e2e.common; + +/** + * Basic abstractions for 
all resources used in connector testing framework. + + * <p>Lifecycle of test resources will be managed by the framework. + */ +public interface TestResource { + + /** + * Start up the test resource. + + * <p>The implementation of this method should be idempotent. + + * @throws Exception if anything wrong when starting the resource + */ + void startUp() throws Exception; + + /** + * Tear down the test resource. + + * <p>
The test resource should be able to tear down even without a startup (could be a no-op). + * + * @throws Exception if anything wrong when tearing the resource down + */ + void tearDown() throws Exception; +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestSuiteBase.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestSuiteBase.java new file mode 100644 index 0000000..6417062 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/TestSuiteBase.java @@ -0,0 +1,29 @@ +package com.geedgenetworks.test.e2e.common; + +import com.geedgenetworks.test.e2e.common.container.ContainerUtil; +import com.geedgenetworks.test.e2e.common.container.TestContainer; +import com.geedgenetworks.test.e2e.common.container.TestContainersFactory; +import com.geedgenetworks.test.e2e.common.junit.ContainerTestingExtension; +import com.geedgenetworks.test.e2e.common.junit.TestCaseInvocationContextProvider; +import com.geedgenetworks.test.e2e.common.junit.TestContainers; +import com.geedgenetworks.test.e2e.common.junit.TestLoggerExtension; +import com.github.dockerjava.api.DockerClient; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; +import org.testcontainers.DockerClientFactory; +import org.testcontainers.containers.Network; + +@ExtendWith({ + ContainerTestingExtension.class, + TestLoggerExtension.class, + TestCaseInvocationContextProvider.class +}) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public abstract class TestSuiteBase { + protected static final Network NETWORK = TestContainer.NETWORK; + @TestContainers + private TestContainersFactory containersFactory = ContainerUtil::discoverTestContainers; + protected DockerClient dockerClient = DockerClientFactory.lazyClient(); + + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestContainer.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestContainer.java new file mode 100644 index 0000000..35b4d8d --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestContainer.java @@ -0,0 +1,192 @@ +package com.geedgenetworks.test.e2e.common.container; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.Container; +import org.testcontainers.containers.GenericContainer; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; +import static com.geedgenetworks.test.e2e.common.container.ContainerUtil.PROJECT_ROOT_PATH; +public abstract class AbstractTestContainer implements TestContainer { + protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestContainer.class); + + public static final String GROOTSTREAM_HOME = "/tmp/grootstream/"; + + protected final String startModuleName; + + protected final String startModuleFullPath; + + public AbstractTestContainer() { + this.startModuleName = getStartModuleName(); + this.startModuleFullPath = + PROJECT_ROOT_PATH + + File.separator + + this.startModuleName; + ContainerUtil.checkPathExist(startModuleFullPath); + } + + protected abstract String getDockerImage(); + + protected abstract String getStartModuleName(); + + protected abstract String getStartShellName(); + + protected abstract String getConnectorModulePath(); + + protected abstract String 
getConnectorType(); + + protected abstract String getSavePointCommand(); + + protected abstract String getRestoreCommand(); + + protected abstract String getConnectorNamePrefix(); + + protected abstract List getExtraStartShellCommands(); + + protected void executeExtraCommands(GenericContainer container) + throws IOException, InterruptedException { + // do nothing + } + + protected void copyGrootStreamStarterToContainer(GenericContainer container) { + ContainerUtil.copyGrootStreamStarterToContainer( + container, this.startModuleName, this.startModuleFullPath, GROOTSTREAM_HOME); + } + + protected void copyGrootStreamStarterLoggingToContainer(GenericContainer container) { + ContainerUtil.copyGrootStreamStarterLoggingToContainer( + container, this.startModuleFullPath, GROOTSTREAM_HOME); + } + + protected Container.ExecResult executeJob(GenericContainer container, String confFile, List variables) + throws IOException, InterruptedException { + + final String confInContainerPath = ContainerUtil.copyConfigFileToContainer(container, confFile); + // copy connectors + ContainerUtil.copyConnectorJarToContainer( + container, + confFile, + getConnectorModulePath(), + getConnectorNamePrefix(), + getConnectorType(), + GROOTSTREAM_HOME); + final List command = new ArrayList<>(); + String binPath = Paths.get(GROOTSTREAM_HOME, "bin", getStartShellName()).toString(); + // base command + command.add(ContainerUtil.adaptPathForWin(binPath)); + command.add("--config"); + command.add(ContainerUtil.adaptPathForWin(confInContainerPath)); + command.add("--target"); + command.add("remote"); + List extraStartShellCommands = new ArrayList<>(getExtraStartShellCommands()); + if (variables != null && !variables.isEmpty()) { + variables.forEach( + v -> { + extraStartShellCommands.add("-i"); + extraStartShellCommands.add(v); + }); + } + command.addAll(extraStartShellCommands); + return executeCommand(container, command); + } + + + + protected Container.ExecResult executeJob(GenericContainer container, String confFile) + throws IOException, InterruptedException { + return executeJob(container, confFile, null); + } + + + + protected Container.ExecResult savepointJob(GenericContainer container, String jobId) + throws IOException, InterruptedException { + final List command = new ArrayList<>(); + String binPath = Paths.get(GROOTSTREAM_HOME, "bin", getStartShellName()).toString(); + // base command + command.add(ContainerUtil.adaptPathForWin(binPath)); + command.add(getSavePointCommand()); + command.add(jobId); + command.addAll(getExtraStartShellCommands()); + return executeCommand(container, command); + } + + protected Container.ExecResult restoreJob( + GenericContainer container, String confFile, String jobId) + throws IOException, InterruptedException { + final String confInContainerPath = ContainerUtil.copyConfigFileToContainer(container, confFile); + // copy connectors + ContainerUtil.copyConnectorJarToContainer( + container, + confFile, + getConnectorModulePath(), + getConnectorNamePrefix(), + getConnectorType(), + GROOTSTREAM_HOME); + final List command = new ArrayList<>(); + String binPath = Paths.get(GROOTSTREAM_HOME, "bin", getStartShellName()).toString(); + // base command + command.add(ContainerUtil.adaptPathForWin(binPath)); + command.add("--config"); + command.add(ContainerUtil.adaptPathForWin(confInContainerPath)); + command.add(getRestoreCommand()); + command.add(jobId); + command.addAll(getExtraStartShellCommands()); + return executeCommand(container, command); + } + + protected Container.ExecResult 
executeCommand( + GenericContainer container, List command) + throws IOException, InterruptedException { + String commandStr = String.join(" ", command); + LOG.info( + "Execute command in container[{}] " + + "\n==================== Shell Command start ====================\n" + + "{}" + + "\n==================== Shell Command end ====================", + container.getDockerImageName(), + commandStr); + Container.ExecResult execResult = container.execInContainer("bash", "-c", commandStr); + + if (execResult.getStdout() != null && !execResult.getStdout().isEmpty()) { + LOG.info( + "Container[{}] command {} STDOUT:" + + "\n==================== STDOUT start ====================\n" + + "{}" + + "\n==================== STDOUT end ====================", + container.getDockerImageName(), + commandStr, + execResult.getStdout()); + } + if (execResult.getStderr() != null && !execResult.getStderr().isEmpty()) { + LOG.error( + "Container[{}] command {} STDERR:" + + "\n==================== STDERR start ====================\n" + + "{}" + + "\n==================== STDERR end ====================", + container.getDockerImageName(), + commandStr, + execResult.getStderr()); + } + + if (execResult.getExitCode() != 0) { + LOG.info( + "Container[{}] command {} Server Log:" + + "\n==================== Server Log start ====================\n" + + "{}" + + "\n==================== Server Log end ====================", + container.getDockerImageName(), + commandStr, + container.getLogs()); + } + + return execResult; + } + + + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestFlinkContainer.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestFlinkContainer.java new file mode 100644 index 0000000..b558e9b --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/AbstractTestFlinkContainer.java @@ -0,0 +1,157 @@ +package com.geedgenetworks.test.e2e.common.container; + +import com.google.common.collect.Lists; +import lombok.NoArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.testcontainers.containers.Container; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.utility.DockerLoggerFactory; + +import java.io.IOException; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +@NoArgsConstructor +@Slf4j +public abstract class AbstractTestFlinkContainer extends AbstractTestContainer { + protected static final List DEFAULT_FLINK_PROPERTIES = + Arrays.asList( + "jobmanager.rpc.address: jobmanager", + "taskmanager.numberOfTaskSlots: 10", + "parallelism.default: 3", + "env.java.opts: -Doracle.jdbc.timezoneAsRegion=false"); + + protected static final String DEFAULT_DOCKER_IMAGE = "flink:1.13.1-scala_2.11-java11"; + + protected GenericContainer jobManager; + protected GenericContainer taskManager; + + @Override + protected String getDockerImage() { + return DEFAULT_DOCKER_IMAGE; + } + + @Override + public void startUp() throws Exception { + final String dockerImage = getDockerImage(); + final String properties = String.join("\n", getFlinkProperties()); + jobManager = + new GenericContainer<>(dockerImage) + .withCommand("jobmanager") + 
.withNetwork(NETWORK) + .withNetworkAliases("jobmanager") + .withExposedPorts() + .withEnv("FLINK_PROPERTIES", properties) + .withLogConsumer( + new Slf4jLogConsumer( + DockerLoggerFactory.getLogger(dockerImage + ":jobmanager"))) + .waitingFor( + new LogMessageWaitStrategy() + .withRegEx(".*Starting the resource manager.*") + .withStartupTimeout(Duration.ofMinutes(2))) + ; + + // Copy groot-stream bootstrap and some other files to the container + copyGrootStreamStarterToContainer(jobManager); + copyGrootStreamStarterLoggingToContainer(jobManager); + + jobManager.setPortBindings(Lists.newArrayList(String.format("%s:%s", 8999, 8081))); + + taskManager = + new GenericContainer<>(dockerImage) + .withCommand("taskmanager") + .withNetwork(NETWORK) + .withNetworkAliases("taskmanager") + .withEnv("FLINK_PROPERTIES", properties) + .dependsOn(jobManager) + .withLogConsumer( + new Slf4jLogConsumer( + DockerLoggerFactory.getLogger( + dockerImage + ":taskmanager"))) + .waitingFor( + new LogMessageWaitStrategy() + .withRegEx( + ".*Successful registration at resource manager.*") + .withStartupTimeout(Duration.ofMinutes(2))); + + // Copy groot-stream bootstrap and some other files to the container + copyGrootStreamStarterToContainer(taskManager); + copyGrootStreamStarterLoggingToContainer(taskManager); + + Startables.deepStart(Stream.of(jobManager)).join(); + Startables.deepStart(Stream.of(taskManager)).join(); + // execute extra commands + executeExtraCommands(jobManager); + } + + protected List getFlinkProperties() { + return DEFAULT_FLINK_PROPERTIES; + } + + @Override + public void tearDown() throws Exception { + if (taskManager != null) { + taskManager.stop(); + } + if (jobManager != null) { + jobManager.stop(); + } + } + + @Override + protected String getSavePointCommand() { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + protected String getRestoreCommand() { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + protected List getExtraStartShellCommands() { + return Collections.emptyList(); + } + + + public void executeExtraCommands(ContainerExtendedFactory extendedFactory) + throws IOException, InterruptedException { + extendedFactory.extend(jobManager); + extendedFactory.extend(taskManager); + } + + + @Override + public Container.ExecResult executeJob(String confFile) + throws IOException, InterruptedException { + return executeJob(confFile, null); + } + + @Override + public Container.ExecResult executeJob(String confFile, List variables) + throws IOException, InterruptedException { + log.info("test in container: {}", identifier()); + return executeJob(jobManager, confFile, variables); + } + + @Override + public String getServerLogs() { + return jobManager.getLogs() + "\n" + taskManager.getLogs(); + } + + + public String executeJobManagerInnerCommand(String command) + throws IOException, InterruptedException { + return jobManager.execInContainer("bash", "-c", command).getStdout(); + } + + + + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerExtendedFactory.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerExtendedFactory.java new file mode 100644 index 0000000..6945432 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerExtendedFactory.java @@ -0,0 +1,11 @@ +package com.geedgenetworks.test.e2e.common.container; + +import 
org.testcontainers.containers.GenericContainer; + +import java.io.IOException; + +@FunctionalInterface +public interface ContainerExtendedFactory { + void extend(GenericContainer engineMasterContainer) throws IOException, InterruptedException; + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerUtil.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerUtil.java new file mode 100644 index 0000000..0e6f3fd --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/ContainerUtil.java @@ -0,0 +1,366 @@ +package com.geedgenetworks.test.e2e.common.container; + +import cn.hutool.core.util.XmlUtil; +import com.alibaba.fastjson2.JSONObject; +import com.geedgenetworks.bootstrap.utils.ConfigBuilder; +import com.google.common.collect.Lists; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import com.typesafe.config.ConfigResolveOptions; +import groovy.lang.Tuple2; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.Assertions; +import org.testcontainers.containers.Container; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.utility.MountableFile; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +@Slf4j +public final class ContainerUtil { + + public static final String PROJECT_ROOT_PATH = getProjectRootPath(); + public static final String PLUGIN_MAPPING_FILE = "plugin-mapping.properties"; + + + private static String getProjectRootPath() { + String testCommonRootModuleDir = "groot-tests"; + Path path = Paths.get(System.getProperty("user.dir")); + while (!path.endsWith(Paths.get(testCommonRootModuleDir))) { + path = path.getParent(); + } + return path.getParent().toString(); + } + + public static String getProjectVersion() { + String pomFile = PROJECT_ROOT_PATH + File.separator + "pom.xml"; + checkPathExist(pomFile); + Document docResult = XmlUtil.readXML(new File(pomFile)); + Element project = XmlUtil.getRootElement(docResult); + Element properties = XmlUtil.getElement(project, "properties"); + Element revisionElement = XmlUtil.getElement(properties, "revision"); + return revisionElement.getTextContent(); + } + + + public static void checkPathExist(String path) { + Assertions.assertTrue(new File(path).exists(), path + " must exist"); + } + + public static void copyGrootStreamStarterToContainer( + GenericContainer container, + String startModuleName, + String startModulePath, + String GrootStreamHomeInContainer) { + + final String[] splits = StringUtils.split(startModuleName, File.separator); + final String startJarName = splits[splits.length - 1] + ".jar"; + final String startJarPath = + startModulePath + File.separator + "target" + File.separator + startJarName; + checkPathExist(startJarPath); + + // don't use container#withFileSystemBind, this isn't supported in Windows. 
+ container.withCopyFileToContainer( + MountableFile.forHostPath(startJarPath), + Paths.get(GrootStreamHomeInContainer, "bootstrap", startJarName).toString()); + + + // copy libs + + String formatJsonJar = "format-json-" + getProjectVersion() + ".jar"; + Path formatJsonJarPath = + Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-json", "target", formatJsonJar); + container.withCopyFileToContainer( + MountableFile.forHostPath(formatJsonJarPath), + Paths.get(GrootStreamHomeInContainer, "lib", formatJsonJar).toString()); + + String formatProtobufJar = "format-protobuf-" + getProjectVersion() + ".jar"; + Path formatProtobufJarPath = + Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-protobuf", "target", formatProtobufJar); + container.withCopyFileToContainer( + MountableFile.forHostPath(formatProtobufJarPath), + Paths.get(GrootStreamHomeInContainer, "lib", formatProtobufJar).toString()); + + String formatMsgpackJar = "format-msgpack-" + getProjectVersion() + ".jar"; + Path formatMsgpackJarPath = + Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-msgpack", "target", formatMsgpackJar); + container.withCopyFileToContainer( MountableFile.forHostPath(formatMsgpackJarPath), + Paths.get(GrootStreamHomeInContainer, "lib", formatMsgpackJar).toString()); + + String formatRawJar = "format-raw-" + getProjectVersion() + ".jar"; + Path formatRawJarPath = + Paths.get(PROJECT_ROOT_PATH, "groot-formats/format-raw", "target", formatRawJar); + container.withCopyFileToContainer( MountableFile.forHostPath(formatRawJarPath), + Paths.get(GrootStreamHomeInContainer, "lib", formatRawJar).toString()); + + + //copy system config + final String configPath = PROJECT_ROOT_PATH + "/config"; + checkPathExist(configPath); + container.withCopyFileToContainer(MountableFile.forHostPath(configPath), + Paths.get(GrootStreamHomeInContainer, "config").toString()); + + // copy grootstream.yaml + final String grootTestsCommonPath = PROJECT_ROOT_PATH + "/groot-tests/test-e2e-common/src/test/resources"; + checkPathExist(grootTestsCommonPath); + container.withCopyFileToContainer( + MountableFile.forHostPath(grootTestsCommonPath + "/grootstream.yaml"), + Paths.get(GrootStreamHomeInContainer, "config", "grootstream.yaml").toString()); + + + // copy bin + final String startBinPath = startModulePath + File.separator + "src/main/bin/"; + checkPathExist(startBinPath); + container.withCopyFileToContainer( + MountableFile.forHostPath(startBinPath), + Paths.get(GrootStreamHomeInContainer, "bin").toString()); + + // copy plugin-mapping.properties + container.withCopyFileToContainer( + MountableFile.forHostPath(PROJECT_ROOT_PATH + "/plugin-mapping.properties"), + Paths.get(GrootStreamHomeInContainer, "connectors", PLUGIN_MAPPING_FILE).toString()); + + + + } + + public static void copyGrootStreamStarterLoggingToContainer( + GenericContainer container, + String startModulePath, + String GrootStreamHomeInContainer) { + // copy logging lib + final String loggingLibPath = + startModulePath + + File.separator + + "target" + + File.separator + + "logging-e2e" + + File.separator; + checkPathExist(loggingLibPath); + container.withCopyFileToContainer( + MountableFile.forHostPath(loggingLibPath), + Paths.get(GrootStreamHomeInContainer, "bootstrap", "logging").toString()); + } + + public static String copyConfigFileToContainer(GenericContainer container, String confFile) { + final String targetConfInContainer = Paths.get("/tmp", confFile).toString(); + container.copyFileToContainer( + 
MountableFile.forHostPath(getResourcesFile(confFile).getAbsolutePath()), + targetConfInContainer); + return targetConfInContainer; + } + + public static File getResourcesFile(String confFile) { + File file = new File(getCurrentModulePath() + "/src/test/resources" + confFile); + if (file.exists()) { + return file; + } + throw new IllegalArgumentException(confFile + " doesn't exist"); + } + + public static Path getCurrentModulePath() { + return Paths.get(System.getProperty("user.dir")); + } + + public static void copyConnectorJarToContainer( + GenericContainer container, + String confFile, + String connectorsRootPath, + String connectorPrefix, + String connectorType, + String grootStreamHome) { + Config jobConfig = getJobConfig(getResourcesFile(confFile)); + Config connectorsMapping = + getPluginProperties(new File(PROJECT_ROOT_PATH + File.separator + PLUGIN_MAPPING_FILE)); + if (!connectorsMapping.hasPath(connectorType) + || connectorsMapping.getConfig(connectorType).isEmpty()) { + return; + } + Config connectors = connectorsMapping.getConfig(connectorType); + Set connectorNames = getConnectors(jobConfig, connectors, "source"); + connectorNames.addAll(getConnectors(jobConfig, connectors, "sink")); + File module = new File(PROJECT_ROOT_PATH + File.separator + connectorsRootPath); + + List connectorFiles = getConnectorFiles(module, connectorNames, connectorPrefix); + connectorFiles.forEach( + jar -> + container.copyFileToContainer( + MountableFile.forHostPath(jar.getAbsolutePath()), + Paths.get(grootStreamHome, "connectors", jar.getName()).toString())); + } + + public static String adaptPathForWin(String path) { + // Running IT use cases under Windows requires replacing \ with / + return path == null ? "" : path.replaceAll("\\\\", "/"); + } + + + public static List extractJsonFromServerLogs(String logs) { + List jsons = new ArrayList<>(); + Pattern jsonPattern = Pattern.compile("-\\s(\\{.*?\\})"); + Matcher matcher = jsonPattern.matcher(logs); + while (matcher.find()) { + jsons.add(JSONObject.parseObject(matcher.group(1))); + } + return jsons; + } + + + + private static List getConnectorFiles( + File currentModule, Set connectorNames, String connectorPrefix) { + List connectorFiles = new ArrayList<>(); + for (File file : Objects.requireNonNull(currentModule.listFiles())) { + getConnectorFiles(file, connectorNames, connectorPrefix, connectorFiles); + } + return connectorFiles; + } + + private static void getConnectorFiles( + File currentModule, + Set connectorNames, + String connectorPrefix, + List connectors) { + if (currentModule.isFile() || connectorNames.size() == connectors.size()) { + return; + } + if (connectorNames.contains(currentModule.getName())) { + File targetPath = new File(currentModule.getAbsolutePath() + File.separator + "target"); + for (File file : Objects.requireNonNull(targetPath.listFiles())) { + if (file.getName().startsWith(currentModule.getName()) + && !file.getName().endsWith("javadoc.jar") + && !file.getName().endsWith("tests.jar")) { + connectors.add(file); + return; + } + } + } + + if (currentModule.getName().startsWith(connectorPrefix)) { + for (File file : Objects.requireNonNull(currentModule.listFiles())) { + getConnectorFiles(file, connectorNames, connectorPrefix, connectors); + } + } + } + + public static List discoverTestContainers() { + try { + final List result = new LinkedList<>(); + ServiceLoader.load(TestContainer.class, Thread.currentThread().getContextClassLoader()) + .iterator() + .forEachRemaining(result::add); + return result; + } catch 
(ServiceConfigurationError e) { + log.error("Could not load service provider for containers.", e); + throw new RuntimeException("Could not load service provider for containers.", e); + } + } + + private static Set getConnectors( + Config jobConfig, Config connectorsMap, String pluginType) { + // using specific needed plugin type in the job config + Config connectorConfig = jobConfig.getConfig(pluginType+"s"); + List connectorList = Lists.newArrayList(); + connectorConfig.root().unwrapped().forEach((key,value) -> { + Map map = (Map) value; + connectorList.add(map.get("type").toString()); + + }); + + Map connectors = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + connectorsMap.getConfig(pluginType).entrySet().forEach(entry -> { + connectors.put(entry.getKey(), entry.getValue().unwrapped().toString()); + }); + + return connectorList.stream() + .map(String::toLowerCase) + .filter(connectors::containsKey) + .map(connectors::get) + .collect(Collectors.toSet()); + } + + private static Config getJobConfig(File file) { + return ConfigBuilder.of(file.getAbsolutePath()); + } + + private static Config getPluginProperties(File file) { + return ConfigFactory.parseFile(file) + .resolve(ConfigResolveOptions.defaults().setAllowUnresolved(true)) + .resolveWith( + ConfigFactory.systemProperties(), + ConfigResolveOptions.defaults().setAllowUnresolved(true)); + } + + public static List getJVMThreadNames(GenericContainer container) + throws IOException, InterruptedException { + return getJVMThreads(container).stream().map(Tuple2::getV1).collect(Collectors.toList()); + } + + public static Map getJVMLiveObject(GenericContainer container) + throws IOException, InterruptedException { + Container.ExecResult liveObjects = + container.execInContainer("jmap", "-histo:live", getJVMProcessId(container)); + Assertions.assertEquals(0, liveObjects.getExitCode()); + String value = liveObjects.getStdout().trim(); + return Arrays.stream(value.split("\n")) + .skip(2) + .map( + str -> + Arrays.stream(str.split(" ")) + .filter(StringUtils::isNotEmpty) + .collect(Collectors.toList())) + .filter(list -> list.size() == 4) + .collect( + Collectors.toMap( + list -> list.get(3), + list -> Integer.valueOf(list.get(1)), + (a, b) -> a)); + } + + public static List> getJVMThreads(GenericContainer container) + throws IOException, InterruptedException { + Container.ExecResult threads = + container.execInContainer("jstack", getJVMProcessId(container)); + Assertions.assertEquals(0, threads.getExitCode()); + // Thread name line example + // "hz.main.MetricsRegistry.thread-2" #232 prio=5 os_prio=0 tid=0x0000ffff3c003000 nid=0x5e + // waiting on condition [0x0000ffff6cf3a000] + return Arrays.stream(threads.getStdout().trim().split("\n\n")) + .filter(s -> s.startsWith("\"")) + .map( + threadStr -> + new Tuple2<>( + Arrays.stream(threadStr.split("\n")) + .filter(s -> s.startsWith("\"")) + .map(s -> s.substring(1, s.lastIndexOf("\""))) + .findFirst() + .get(), + threadStr)) + .collect(Collectors.toList()); + } + + private static String getJVMProcessId(GenericContainer container) + throws IOException, InterruptedException { + Container.ExecResult processes = container.execInContainer("jps"); + Assertions.assertEquals(0, processes.getExitCode()); + Optional server = + Arrays.stream(processes.getStdout().trim().split("\n")) + .filter(s -> s.contains("GrootstreamServer")) + .findFirst(); + Assertions.assertTrue(server.isPresent()); + return server.get().trim().split(" ")[0]; + } + + + + +} diff --git 
a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/EngineType.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/EngineType.java new file mode 100644 index 0000000..1447c51 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/EngineType.java @@ -0,0 +1,16 @@ +package com.geedgenetworks.test.e2e.common.container; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@Getter +@AllArgsConstructor +public enum EngineType { + FLINK("Flink"); + private final String name; + @Override + public String toString() { + return name; + } + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink13Container.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink13Container.java new file mode 100644 index 0000000..c672784 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink13Container.java @@ -0,0 +1,47 @@ +package com.geedgenetworks.test.e2e.common.container; + +import com.google.auto.service.AutoService; +import lombok.NoArgsConstructor; + +@NoArgsConstructor +@AutoService(TestContainer.class) +public class Flink13Container extends AbstractTestFlinkContainer { + + @Override + protected String getStartModuleName() { + return "groot-bootstrap"; + + } + + @Override + protected String getStartShellName() { + return "start.sh"; + } + + @Override + protected String getConnectorModulePath() { + return "groot-connectors"; + } + + @Override + protected String getConnectorType() { + return "grootstream"; + } + + @Override + protected String getConnectorNamePrefix() { + return "connector-"; + } + + @Override + public TestContainerId identifier() { + return TestContainerId.FLINK_1_13; + } + + @Override + protected String getDockerImage() { + return "192.168.40.153:8082/common/flink:1.13.1-scala_2.11-java11"; + } + + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink17Container.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink17Container.java new file mode 100644 index 0000000..4a5d14f --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/Flink17Container.java @@ -0,0 +1,43 @@ +package com.geedgenetworks.test.e2e.common.container; + +import com.google.auto.service.AutoService; +import lombok.NoArgsConstructor; + +@NoArgsConstructor +@AutoService(TestContainer.class) +public class Flink17Container extends AbstractTestFlinkContainer { + + @Override + protected String getStartModuleName() { + return "groot-bootstrap"; + } + + @Override + protected String getStartShellName() { + return "start.sh"; + } + + @Override + protected String getConnectorModulePath() { + return "groot-connectors"; + } + + @Override + protected String getConnectorType() { + return "grootstream"; + } + + @Override + protected String getConnectorNamePrefix() { + return "connector-"; + } + + @Override + public TestContainerId identifier() { + return TestContainerId.FLINK_1_17 ; + } + @Override + protected String getDockerImage() { + return "flink:1.17.2-scala_2.12-java11"; + } +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainer.java 
b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainer.java new file mode 100644 index 0000000..e8b9388 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainer.java @@ -0,0 +1,36 @@ +package com.geedgenetworks.test.e2e.common.container; + +import com.geedgenetworks.test.e2e.common.TestResource; +import org.testcontainers.containers.Container; +import org.testcontainers.containers.Network; + +import java.io.IOException; +import java.util.List; + +public interface TestContainer extends TestResource { + Network NETWORK = Network.newNetwork(); + TestContainerId identifier(); + + void executeExtraCommands(ContainerExtendedFactory extendedFactory) + throws IOException, InterruptedException; + + Container.ExecResult executeJob(String confFile) throws IOException, InterruptedException; + + Container.ExecResult executeJob(String confFile, List variables) + throws IOException, InterruptedException; + + default Container.ExecResult savepointJob(String jobId) + throws IOException, InterruptedException { + throw new UnsupportedOperationException("Not implemented"); + } + + default Container.ExecResult restoreJob(String confFile, String jobId) + throws IOException, InterruptedException { + throw new UnsupportedOperationException("Not implemented"); + } + + String getServerLogs(); + + + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainerId.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainerId.java new file mode 100644 index 0000000..6e51720 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainerId.java @@ -0,0 +1,20 @@ +package com.geedgenetworks.test.e2e.common.container; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +import static com.geedgenetworks.test.e2e.common.container.EngineType.FLINK; +@Getter +@AllArgsConstructor +public enum TestContainerId { + FLINK_1_13(FLINK, "1.13.1"), + FLINK_1_17(FLINK, "1.17.2"); + private final EngineType engineType; + private final String version; + + @Override + public String toString() { + return engineType.toString() + ":" + version; + } + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainersFactory.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainersFactory.java new file mode 100644 index 0000000..7406c28 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestContainersFactory.java @@ -0,0 +1,7 @@ +package com.geedgenetworks.test.e2e.common.container; + +import java.util.List; + +public interface TestContainersFactory { + List create(); +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestHelper.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestHelper.java new file mode 100644 index 0000000..ac4f466 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/container/TestHelper.java @@ -0,0 +1,23 @@ +package com.geedgenetworks.test.e2e.common.container; + +import org.junit.jupiter.api.Assertions; +import org.testcontainers.containers.Container; + +import java.io.IOException; + +public class TestHelper { + private final TestContainer container; 
+ + public TestHelper(TestContainer container) { + this.container = container; + } + + public void execute(String file) throws IOException, InterruptedException { + execute(0, file); + } + + public void execute(int exceptResult, String file) throws IOException, InterruptedException { + Container.ExecResult result = container.executeJob(file); + Assertions.assertEquals(exceptResult, result.getExitCode(), result.getStderr()); + } +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/AnnotationUtil.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/AnnotationUtil.java new file mode 100644 index 0000000..af623c6 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/AnnotationUtil.java @@ -0,0 +1,37 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import com.geedgenetworks.test.e2e.common.container.TestContainer; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; +import com.geedgenetworks.test.e2e.common.container.EngineType; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.junit.platform.commons.util.AnnotationUtils; +import java.lang.reflect.AnnotatedElement; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + + +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class AnnotationUtil { + public static List filterDisabledContainers( + List containers, AnnotatedElement annotatedElement) { + // Filters disabled containers + final List disabledContainers = new ArrayList<>(); + final List disabledEngineTypes = new ArrayList<>(); + AnnotationUtils.findAnnotation(annotatedElement, DisabledOnContainer.class) + .ifPresent( + annotation -> { + Collections.addAll(disabledContainers, annotation.value()); + Collections.addAll(disabledEngineTypes, annotation.type()); + }); + return containers.stream() + .filter(container -> !disabledContainers.contains(container.identifier())) + .filter( + container -> + !disabledEngineTypes.contains( + container.identifier().getEngineType())) + .collect(Collectors.toList()); + } +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/ContainerTestingExtension.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/ContainerTestingExtension.java new file mode 100644 index 0000000..57a6df9 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/ContainerTestingExtension.java @@ -0,0 +1,84 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import com.geedgenetworks.test.e2e.common.container.ContainerExtendedFactory; +import com.geedgenetworks.test.e2e.common.container.TestContainer; +import com.geedgenetworks.test.e2e.common.container.TestContainersFactory; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.platform.commons.support.AnnotationSupport; + +import java.lang.annotation.Annotation; +import java.util.Collection; +import java.util.List; + +public class ContainerTestingExtension implements BeforeAllCallback, AfterAllCallback { + public static final ExtensionContext.Namespace TEST_RESOURCE_NAMESPACE = + ExtensionContext.Namespace.create("testResourceNamespace"); + public static final String TEST_CONTAINERS_STORE_KEY = "testContainers"; + 
public static final String TEST_EXTENDED_FACTORY_STORE_KEY = "testContainerExtendedFactory"; + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + + List containerExtendedFactories = + AnnotationSupport.findAnnotatedFieldValues( + context.getRequiredTestInstance(), + TestContainerExtension.class, + ContainerExtendedFactory.class); + checkAtMostOneAnnotationField(containerExtendedFactories, TestContainerExtension.class); + ContainerExtendedFactory containerExtendedFactory = container -> {}; + if (!containerExtendedFactories.isEmpty()) { + containerExtendedFactory = containerExtendedFactories.get(0); + } + context.getStore(TEST_RESOURCE_NAMESPACE) + .put(TEST_EXTENDED_FACTORY_STORE_KEY, containerExtendedFactory); + + List containersFactories = + AnnotationSupport.findAnnotatedFieldValues( + context.getRequiredTestInstance(), + TestContainers.class, + TestContainersFactory.class); + + checkExactlyOneAnnotatedField(containersFactories, TestContainers.class); + + List testContainers = + AnnotationUtil.filterDisabledContainers( + containersFactories.get(0).create(), + context.getRequiredTestInstance().getClass()); + context.getStore(TEST_RESOURCE_NAMESPACE).put(TEST_CONTAINERS_STORE_KEY, testContainers); + + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + context.getStore(TEST_RESOURCE_NAMESPACE).remove(TEST_CONTAINERS_STORE_KEY); + } + + + + + private void checkExactlyOneAnnotatedField( + Collection fields, Class annotation) { + checkAtMostOneAnnotationField(fields, annotation); + checkAtLeastOneAnnotationField(fields, annotation); + } + + private void checkAtLeastOneAnnotationField( + Collection fields, Class annotation) { + if (fields.isEmpty()) { + throw new IllegalStateException( + String.format( + "No fields are annotated with '@%s'", annotation.getSimpleName())); + } + } + private void checkAtMostOneAnnotationField( + Collection fields, Class annotation) { + if (fields.size() > 1) { + throw new IllegalStateException( + String.format( + "Multiple fields are annotated with '@%s'", + annotation.getSimpleName())); + } + } +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/DisabledOnContainer.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/DisabledOnContainer.java new file mode 100644 index 0000000..4147ac5 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/DisabledOnContainer.java @@ -0,0 +1,22 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import com.geedgenetworks.test.e2e.common.container.EngineType; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; + +import java.lang.annotation.*; + +@Target({ElementType.TYPE, ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +@Inherited +public @interface DisabledOnContainer { + TestContainerId[] value(); + EngineType[] type() default {}; + + /** + * Custom reason to provide if the test container is disabled. + * + *
<p>
If a custom reason is supplied, it will be combined with the default reason for this + * annotation. If a custom reason is not supplied, the default reason will be used. + */ + String disabledReason() default ""; +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestCaseInvocationContextProvider.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestCaseInvocationContextProvider.java new file mode 100644 index 0000000..f66603e --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestCaseInvocationContextProvider.java @@ -0,0 +1,114 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import com.geedgenetworks.test.e2e.common.container.ContainerExtendedFactory; +import com.geedgenetworks.test.e2e.common.container.TestContainer; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.extension.*; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; +import static com.geedgenetworks.test.e2e.common.junit.ContainerTestingExtension.TEST_CONTAINERS_STORE_KEY; +import static com.geedgenetworks.test.e2e.common.junit.ContainerTestingExtension.TEST_EXTENDED_FACTORY_STORE_KEY; +import static com.geedgenetworks.test.e2e.common.junit.ContainerTestingExtension.TEST_RESOURCE_NAMESPACE; +@Slf4j +public class TestCaseInvocationContextProvider implements TestTemplateInvocationContextProvider { + @Override + public boolean supportsTestTemplate(ExtensionContext context) { + // Only support test cases with TestContainer as parameter + Class[] parameterTypes = context.getRequiredTestMethod().getParameterTypes(); + return parameterTypes.length == 1 + && Arrays.stream(parameterTypes).anyMatch(TestContainer.class::isAssignableFrom); + } + + @Override + public Stream provideTestTemplateInvocationContexts( + ExtensionContext context) { + List testContainers = + AnnotationUtil.filterDisabledContainers( + (List) + context.getStore(TEST_RESOURCE_NAMESPACE) + .get(TEST_CONTAINERS_STORE_KEY), + context.getRequiredTestMethod()); + + ContainerExtendedFactory containerExtendedFactory = + (ContainerExtendedFactory) + context.getStore(TEST_RESOURCE_NAMESPACE) + .get(TEST_EXTENDED_FACTORY_STORE_KEY); + + int containerAmount = testContainers.size(); + return testContainers.stream() + .map( + testContainer -> + new TestResourceProvidingInvocationContext( + testContainer, containerExtendedFactory, containerAmount)); + } + static class TestResourceProvidingInvocationContext implements TestTemplateInvocationContext { + private final TestContainer testContainer; + private final ContainerExtendedFactory containerExtendedFactory; + private final Integer containerAmount; + + public TestResourceProvidingInvocationContext( + TestContainer testContainer, + ContainerExtendedFactory containerExtendedFactory, + int containerAmount) { + this.testContainer = testContainer; + this.containerExtendedFactory = containerExtendedFactory; + this.containerAmount = containerAmount; + } + + @Override + public String getDisplayName(int invocationIndex) { + return String.format( + "TestContainer(%s/%s): %s", + invocationIndex, containerAmount, testContainer.identifier()); + } + + @Override + public List getAdditionalExtensions() { + return Arrays.asList( + // Extension for injecting parameters + new TestContainerResolver(testContainer, containerExtendedFactory), + // Extension for closing test container + (AfterTestExecutionCallback) + ignore -> { 
+ testContainer.tearDown(); + log.info( + "The TestContainer[{}] is closed.", + testContainer.identifier()); + }); + } + } + + private static class TestContainerResolver implements ParameterResolver { + + private final TestContainer testContainer; + private final ContainerExtendedFactory containerExtendedFactory; + + private TestContainerResolver( + TestContainer testContainer, ContainerExtendedFactory containerExtendedFactory) { + this.testContainer = testContainer; + this.containerExtendedFactory = containerExtendedFactory; + } + + @Override + public boolean supportsParameter( + ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return TestContainer.class.isAssignableFrom(parameterContext.getParameter().getType()); + } + + @SneakyThrows + @Override + public Object resolveParameter( + ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + testContainer.startUp(); + testContainer.executeExtraCommands(containerExtendedFactory); + log.info("The TestContainer[{}] is running.", testContainer.identifier()); + return this.testContainer; + } + } + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainerExtension.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainerExtension.java new file mode 100644 index 0000000..1f140df --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainerExtension.java @@ -0,0 +1,12 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.FIELD) +@Retention(RetentionPolicy.RUNTIME) +public @interface TestContainerExtension { + +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainers.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainers.java new file mode 100644 index 0000000..64d536e --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestContainers.java @@ -0,0 +1,11 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.FIELD) +@Retention(RetentionPolicy.RUNTIME) +public @interface TestContainers { +} diff --git a/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestLoggerExtension.java b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestLoggerExtension.java new file mode 100644 index 0000000..e6ec46a --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/java/com/geedgenetworks/test/e2e/common/junit/TestLoggerExtension.java @@ -0,0 +1,60 @@ +package com.geedgenetworks.test.e2e.common.junit; + +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.TestWatcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class TestLoggerExtension implements TestWatcher, BeforeEachCallback { + private static final Logger LOG = 
LoggerFactory.getLogger(TestLoggerExtension.class); + @Override + public void beforeEach(ExtensionContext context) { + LOG.info( + "\n================================================================================" + + "\nTest {}.{} is running." + + "\n--------------------------------------------------------------------------------", + context.getRequiredTestClass().getCanonicalName(), + context.getRequiredTestMethod().getName()); + } + + @Override + public void testSuccessful(ExtensionContext context) { + LOG.info( + "\n--------------------------------------------------------------------------------" + + "\nTest {}.{} successfully run." + + "\n================================================================================", + context.getRequiredTestClass().getCanonicalName(), + context.getRequiredTestMethod().getName()); + } + + @Override + public void testFailed(ExtensionContext context, Throwable cause) { + LOG.error( + "\n--------------------------------------------------------------------------------" + + "\nTest {}.{} failed with:\n{}" + + "\n================================================================================", + context.getRequiredTestClass().getCanonicalName(), + context.getRequiredTestMethod().getName(), + exceptionToString(cause)); + } + + private static String exceptionToString(Throwable t) { + if (t == null) { + return "(null)"; + } + + try { + StringWriter stm = new StringWriter(); + PrintWriter wrt = new PrintWriter(stm); + t.printStackTrace(wrt); + wrt.close(); + return stm.toString(); + } catch (Throwable ignored) { + return t.getClass().getName() + " (error while printing stack trace)"; + } + } +} diff --git a/groot-tests/test-e2e-common/src/test/resources/grootstream.yaml b/groot-tests/test-e2e-common/src/test/resources/grootstream.yaml new file mode 100644 index 0000000..0def444 --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/resources/grootstream.yaml @@ -0,0 +1,14 @@ +grootstream: + knowledge_base: + - name: tsg_ip_asn + fs_type: local + fs_path: /tmp/grootstream/config/dat/ + files: + - asn_builtin.mmdb + - name: tsg_ip_location + fs_type: local + fs_path: /tmp/grootstream/config/dat/ + files: + - ip_builtin.mmdb + properties: + scheduler.knowledge_base.update.interval.minutes: 5 diff --git a/groot-tests/test-e2e-common/src/test/resources/log4j2.properties b/groot-tests/test-e2e-common/src/test/resources/log4j2.properties new file mode 100644 index 0000000..fb3ac1e --- /dev/null +++ b/groot-tests/test-e2e-common/src/test/resources/log4j2.properties @@ -0,0 +1,42 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +rootLogger.level = INFO + +rootLogger.appenderRef.consoleStdout.ref = consoleStdoutAppender +rootLogger.appenderRef.consoleStderr.ref = consoleStderrAppender + +appender.consoleStdout.name = consoleStdoutAppender +appender.consoleStdout.type = CONSOLE +appender.consoleStdout.target = SYSTEM_OUT +appender.consoleStdout.layout.type = PatternLayout +appender.consoleStdout.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %c [%t] - %m%n +appender.consoleStdout.filter.acceptLtWarn.type = ThresholdFilter +appender.consoleStdout.filter.acceptLtWarn.level = WARN +appender.consoleStdout.filter.acceptLtWarn.onMatch = DENY +appender.consoleStdout.filter.acceptLtWarn.onMismatch = ACCEPT + +appender.consoleStderr.name = consoleStderrAppender +appender.consoleStderr.type = CONSOLE +appender.consoleStderr.target = SYSTEM_ERR +appender.consoleStderr.layout.type = PatternLayout +appender.consoleStderr.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %c [%t] - %m%n +appender.consoleStderr.filter.acceptGteWarn.type = ThresholdFilter +appender.consoleStderr.filter.acceptGteWarn.level = WARN +appender.consoleStderr.filter.acceptGteWarn.onMatch = ACCEPT +appender.consoleStderr.filter.acceptGteWarn.onMismatch = DENY diff --git a/groot-tests/test-e2e-connector-clickhouse/pom.xml b/groot-tests/test-e2e-connector-clickhouse/pom.xml new file mode 100644 index 0000000..b9d4564 --- /dev/null +++ b/groot-tests/test-e2e-connector-clickhouse/pom.xml @@ -0,0 +1,88 @@ + + + 4.0.0 + + com.geedgenetworks + groot-tests + ${revision} + + + test-e2e-connector-clickhouse + Groot : Tests : E2E : Connector : ClickHouse + + + 11 + 11 + UTF-8 + 0.6.3 + 4.0.3 + 5.2.1 + + + + + + com.geedgenetworks + test-e2e-common + ${project.version} + test-jar + test + + + + org.testcontainers + clickhouse + ${testcontainer.version} + test + + + + com.zaxxer + HikariCP + ${hikaricp.version} + test + + + + org.lz4 + lz4-java + 1.8.0 + test + + + + + com.clickhouse + clickhouse-jdbc + ${clickhouse.jdbc.version} + test + + + + + org.apache.httpcomponents.client5 + httpclient5 + ${apache-httpclient.version} + test + + + + com.geedgenetworks + connector-clickhouse + ${project.version} + test + + + + + org.xerial.snappy + snappy-java + test + + + + + + \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-clickhouse/src/test/java/com/geedgenetworks/test/e2e/connector/clickhouse/ClickHouseIT.java b/groot-tests/test-e2e-connector-clickhouse/src/test/java/com/geedgenetworks/test/e2e/connector/clickhouse/ClickHouseIT.java new file mode 100644 index 0000000..8b25f14 --- /dev/null +++ b/groot-tests/test-e2e-connector-clickhouse/src/test/java/com/geedgenetworks/test/e2e/connector/clickhouse/ClickHouseIT.java @@ -0,0 +1,349 @@ +package com.geedgenetworks.test.e2e.connector.clickhouse; + +import com.alibaba.fastjson2.JSON; +import com.geedgenetworks.test.e2e.common.TestResource; +import com.geedgenetworks.test.e2e.common.TestSuiteBase; +import com.geedgenetworks.test.e2e.common.container.ContainerUtil; +import com.geedgenetworks.test.e2e.common.container.TestContainer; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; +import com.geedgenetworks.test.e2e.common.junit.DisabledOnContainer; +import com.google.common.collect.Maps; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import lombok.extern.slf4j.Slf4j; +import org.awaitility.Awaitility; +import org.junit.jupiter.api.AfterAll; +import 
org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.TestTemplate; +import org.testcontainers.containers.ClickHouseContainer; +import org.testcontainers.containers.Container; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.utility.DockerLoggerFactory; +import org.testcontainers.utility.MountableFile; + +import java.io.File; +import java.io.IOException; +import java.math.BigDecimal; +import java.sql.*; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; + +import static org.awaitility.Awaitility.await; + +@Slf4j +@DisabledOnContainer( + value = {TestContainerId.FLINK_1_17}, + disabledReason = "Override TestSuiteBase @DisabledOnContainer") +public class ClickHouseIT extends TestSuiteBase implements TestResource { + private static final String CLICKHOUSE_DOCKER_IMAGE = "clickhouse/clickhouse-server:23.3.19.32"; + private static final String DRIVER_CLASS = "com.clickhouse.jdbc.ClickHouseDriver"; + private static final String INIT_CLICKHOUSE_PATH = "/init/clickhouse_test_sql.conf"; + private static final String DATABASE = "default"; + private static final String SOURCE_TABLE = "source_table"; + private static final String SINK_TABLE = "sink_table"; + private static final String INSERT_SQL = "insert_sql"; + private static final String COMPARE_SQL = "compare_sql"; + private static final String HOST = "clickhouse"; + private static final Config CONFIG = getInitClickhouseConfig(); + private ClickHouseContainer clickHouseContainer; + private Connection connection; + private static final String[] default_columns = new String[] { + "id", + "c_array_string", + "c_array_short", + "c_array_int", + "c_array_long", + "c_array_float", + "c_array_double", + "c_string", + "c_int8", + "c_int16", + "c_int32", + "c_int64", + "c_float32", + "c_float64", + "c_decimal", + "c_nullable", + "c_lowcardinality" + }; + private static final List>TEST_DATASET = generateTestDataSet(); + + + @BeforeAll + @Override + public void startUp() throws Exception { + this.clickHouseContainer = + new ClickHouseContainer(CLICKHOUSE_DOCKER_IMAGE) + .withNetwork(NETWORK) + .withNetworkAliases(HOST) + .withCopyFileToContainer(MountableFile.forClasspathResource("init/users.xml"), "/etc/clickhouse-server/users.xml") + .withCopyFileToContainer(MountableFile.forClasspathResource("init/init-clickhouse.sql"), "/docker-entrypoint-initdb.d/init-clickhouse.sql") + .withLogConsumer( + new Slf4jLogConsumer( + DockerLoggerFactory.getLogger(CLICKHOUSE_DOCKER_IMAGE))); + + Startables.deepStart(Stream.of(this.clickHouseContainer)).join(); + System.out.println("Clickhouse JDBC URL: " + this.clickHouseContainer.getJdbcUrl()); + System.out.println("Clickhouse username: " + this.clickHouseContainer.getUsername()); + System.out.println("Clickhouse password: " + this.clickHouseContainer.getPassword()); + + log.info("Clickhouse container started"); + Awaitility.given() + .ignoreExceptions() + .await() + .atMost(360L, TimeUnit.SECONDS) + .untilAsserted(this::initConnection); + this.initializeClickhouseTable(); + this.batchInsertData(); + log.info(JSON.toJSONString(TEST_DATASET)); + + } + + private void initConnection() + throws SQLException, ClassNotFoundException, InstantiationException, + IllegalAccessException { + final Properties info = new Properties(); + info.put("user", this.clickHouseContainer.getUsername()); + 
info.put("password", this.clickHouseContainer.getPassword()); + this.connection = + ((Driver) Class.forName(DRIVER_CLASS).newInstance()) + .connect(this.clickHouseContainer.getJdbcUrl(), info); + + } + + @TestTemplate + public void testClickHouse(TestContainer container) throws Exception { + assertHasData(SOURCE_TABLE); + } + + @TestTemplate + public void testClickHouseDataTypeSinkTable(TestContainer container) throws Exception { + CompletableFuture.supplyAsync( + () -> { + try { + Container.ExecResult execResult = container.executeJob("/clickhouse_data_type_sink.yaml"); + Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); + return execResult; + } catch (Exception e) { + log.error("Commit task exception:" + e.getMessage()); + throw new RuntimeException(e); + } + }); + + await().atMost(300000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + assertHasData(SINK_TABLE); + compareResult(); + }); + } + + + private void assertHasData(String table) { + String sql = String.format("select * from %s.%s limit 1", DATABASE, table); + try (Statement statement = connection.createStatement(); + ResultSet source = statement.executeQuery(sql);) { + Assertions.assertTrue(source.next()); + } catch (SQLException e) { + throw new RuntimeException("test clickhouse server image error", e); + } + } + + private void clearTable(String table) { + try (Statement statement = connection.createStatement()) { + statement.execute(String.format("truncate table %s.%s", DATABASE, table)); + } catch (SQLException e) { + throw new RuntimeException("Test clickhouse server image error", e); + } + } + + private void compareResult() throws SQLException, IOException { + String sourceSql = "select * from " + SOURCE_TABLE + " order by id "; + String sinkSql = "select * from " + SINK_TABLE + " order by id"; + try (Statement sourceStatement = connection.createStatement(); + Statement sinkStatement = connection.createStatement(); + ResultSet sourceResultSet = sourceStatement.executeQuery(sourceSql); + ResultSet sinkResultSet = sinkStatement.executeQuery(sinkSql)) { + Assertions.assertEquals( + sourceResultSet.getMetaData().getColumnCount(), + sinkResultSet.getMetaData().getColumnCount()); + String columns = String.join(",", default_columns); + Assertions.assertTrue( + compare(String.format(CONFIG.getString(COMPARE_SQL), columns, columns))); + } + + sourceSql = "select count(distinct id) as count from " + SOURCE_TABLE; + sinkSql = "select count(distinct id) as count from " + SINK_TABLE; + + try (Statement sourceStatement = connection.createStatement(); + Statement sinkStatement = connection.createStatement(); + ResultSet sourceResultSet = sourceStatement.executeQuery(sourceSql); + ResultSet sinkResultSet = sinkStatement.executeQuery(sinkSql)) { + while (sourceResultSet.next()) { + if (sinkResultSet.next()) { + int sinkUniqueIds = sinkResultSet.getInt("count"); + int sourceUniqueIds = sourceResultSet.getInt("count"); + Assertions.assertEquals(sinkUniqueIds, sourceUniqueIds); + } + } + } + + + } + + private Boolean compare(String sql) { + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(sql);) { + return !resultSet.next(); + } catch (SQLException e) { + throw new RuntimeException("result compare error", e); + } + } + + + private void batchInsertData() { + String sql = CONFIG.getString(INSERT_SQL); + PreparedStatement preparedStatement = null; + try { + this.connection.setAutoCommit(true); + preparedStatement = this.connection.prepareStatement(sql); + for 
(Map row : TEST_DATASET) { + preparedStatement.setLong(1, (Long) row.get(default_columns[0])); + preparedStatement.setArray(2, toSqlArray(row.get(default_columns[1]))); + preparedStatement.setArray(3, toSqlArray(row.get(default_columns[2]))); + preparedStatement.setArray(4, toSqlArray(row.get(default_columns[3]))); + preparedStatement.setArray(5, toSqlArray(row.get(default_columns[4]))); + preparedStatement.setArray(6, toSqlArray(row.get(default_columns[5]))); + preparedStatement.setArray(7, toSqlArray(row.get(default_columns[6]))); + preparedStatement.setString(8, (String) row.get(default_columns[7])); + preparedStatement.setByte(9, (Byte) row.get(default_columns[8])); + preparedStatement.setShort(10, (Short) row.get(default_columns[9])); + preparedStatement.setInt(11, (Integer) row.get(default_columns[10])); + preparedStatement.setLong(12, (Long) row.get(default_columns[11])); + preparedStatement.setFloat(13, (Float) row.get(default_columns[12])); + preparedStatement.setDouble(14, (Double) row.get(default_columns[13])); + preparedStatement.setBigDecimal(15, (BigDecimal) row.get(default_columns[14])); + preparedStatement.setInt(16, (Integer) row.get(default_columns[15])); + preparedStatement.setString(17, (String) row.get(default_columns[16])); + preparedStatement.addBatch(); + } + + preparedStatement.executeBatch(); + preparedStatement.clearBatch(); + + } catch (SQLException e) { + throw new RuntimeException("Batch insert data failed!", e); + } finally { + if (preparedStatement != null) { + try { + preparedStatement.close(); + } catch (SQLException e) { + throw new RuntimeException("PreparedStatement close failed!", e); + } + } + } + + + + } + + + private Array toSqlArray(Object value) throws SQLException { + Object[] elements = null; + String sqlType = null; + if (String[].class.equals(value.getClass())) { + sqlType = "TEXT"; + elements = (String[]) value; + } else if (Boolean[].class.equals(value.getClass())) { + sqlType = "BOOLEAN"; + elements = (Boolean[]) value; + } else if (Byte[].class.equals(value.getClass())) { + sqlType = "TINYINT"; + elements = (Byte[]) value; + } else if (Short[].class.equals(value.getClass())) { + sqlType = "SMALLINT"; + elements = (Short[]) value; + } else if (Integer[].class.equals(value.getClass())) { + sqlType = "INTEGER"; + elements = (Integer[]) value; + } else if (Long[].class.equals(value.getClass())) { + sqlType = "BIGINT"; + elements = (Long[]) value; + } else if (Float[].class.equals(value.getClass())) { + sqlType = "REAL"; + elements = (Float[]) value; + } else if (Double[].class.equals(value.getClass())) { + sqlType = "DOUBLE"; + elements = (Double[]) value; + } + if (sqlType == null) { + throw new IllegalArgumentException( + "array inject error, not supported data type: " + value.getClass()); + } + return connection.createArrayOf(sqlType, elements); + } + + private static List> generateTestDataSet() { + List> rows = new ArrayList<>(); + for (int i = 0; i < 100; ++i) { + Map row = Maps.newLinkedHashMap(); + row.put(default_columns[0], (long) i); + row.put(default_columns[1], new String[] {"string"}); + row.put(default_columns[2], new Short[] {Short.parseShort("1")}); + row.put(default_columns[3], new Integer[] {Integer.parseInt("1")}); + row.put(default_columns[4], new Long[] {Long.parseLong("1")}); + row.put(default_columns[5], new Float[] {Float.parseFloat("1.1")}); + row.put(default_columns[6], new Double[] {Double.parseDouble("1.1")}); + row.put(default_columns[7], "string"); + row.put(default_columns[8], Byte.parseByte("1")); + 
row.put(default_columns[9], Short.parseShort("1")); + row.put(default_columns[10], Integer.parseInt("1")); + row.put(default_columns[11], Long.parseLong("1")); + row.put(default_columns[12], Float.parseFloat("1.1")); + row.put(default_columns[13], Double.parseDouble("1.1")); + row.put(default_columns[14], BigDecimal.valueOf(11L, 1)); + row.put(default_columns[15], i); + row.put(default_columns[16], "string"); + rows.add(row); + } + return rows; + } + + + + private void initializeClickhouseTable() { + try { + Statement statement = this.connection.createStatement(); + statement.execute(CONFIG.getString(SOURCE_TABLE)); + statement.execute(CONFIG.getString(SINK_TABLE)); + } catch (SQLException e) { + throw new RuntimeException("Initializing Clickhouse table failed!", e); + } + } + + private static Config getInitClickhouseConfig() { + File file = ContainerUtil.getResourcesFile(INIT_CLICKHOUSE_PATH); + Config config = ConfigFactory.parseFile(file); + assert config.hasPath(SOURCE_TABLE) + && config.hasPath(SINK_TABLE) + && config.hasPath(INSERT_SQL) + && config.hasPath(COMPARE_SQL); + return config; + } + + @AfterAll + @Override + public void tearDown() throws Exception { + if (this.connection != null) { + this.connection.close(); + } + if (this.clickHouseContainer != null) { + this.clickHouseContainer.stop(); + } + + } +} diff --git a/groot-tests/test-e2e-connector-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml new file mode 100644 index 0000000..3406a67 --- /dev/null +++ b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/clickhouse_data_type_sink.yaml @@ -0,0 +1,79 @@ +sources: + inline_source: + type: inline + schema: + fields: + - name: id + type: bigint + - name: c_array_string + type: array + - name: c_array_short + type: array + - name: c_array_int + type: array + - name: c_array_long + type: array + - name: c_array_float + type: array + - name: c_array_double + type: array + - name: c_string + type: string + - name: c_int8 + type: int + - name: c_int16 + type: int + - name: c_int32 + type: int + - name: c_int64 + type: int + - name: c_float32 + type: float + - name: c_float64 + type: double + - name: c_decimal + type: double + - name: c_date + type: string + - name: c_datetime + type: string + - name: c_nullable + type: int + - name: c_lowcardinality + type: string + properties: + # + # [string] Event Data, it will be parsed to Map by the specified format. 
+ # + data: '[{"id":0,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":0,"c_lowcardinality":"string"},{"id":1,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":1,"c_lowcardinality":"string"},{"id":2,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":2,"c_lowcardinality":"string"},{"id":3,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":3,"c_lowcardinality":"string"},{"id":4,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":4,"c_lowcardinality":"string"},{"id":5,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":5,"c_lowcardinality":"string"},{"id":6,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":6,"c_lowcardinality":"string"},{"id":7,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":7,"c_lowcardinality":"string"},{"id":8,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":8,"c_lowcardinality":"string"},{"id":9,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":9,"c_lowcardinality":"string"},{"id":10,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":10,"c_lowcardinality":"string"},{"id":11,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":11,"c
_lowcardinality":"string"},{"id":12,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":12,"c_lowcardinality":"string"},{"id":13,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":13,"c_lowcardinality":"string"},{"id":14,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":14,"c_lowcardinality":"string"},{"id":15,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":15,"c_lowcardinality":"string"},{"id":16,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":16,"c_lowcardinality":"string"},{"id":17,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":17,"c_lowcardinality":"string"},{"id":18,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":18,"c_lowcardinality":"string"},{"id":19,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":19,"c_lowcardinality":"string"},{"id":20,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":20,"c_lowcardinality":"string"},{"id":21,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":21,"c_lowcardinality":"string"},{"id":22,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":22,"c_lowcardinality":"string"},{"id":23,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"
c_decimal":1.1,"c_nullable":23,"c_lowcardinality":"string"},{"id":24,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":24,"c_lowcardinality":"string"},{"id":25,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":25,"c_lowcardinality":"string"},{"id":26,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":26,"c_lowcardinality":"string"},{"id":27,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":27,"c_lowcardinality":"string"},{"id":28,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":28,"c_lowcardinality":"string"},{"id":29,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":29,"c_lowcardinality":"string"},{"id":30,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":30,"c_lowcardinality":"string"},{"id":31,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":31,"c_lowcardinality":"string"},{"id":32,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":32,"c_lowcardinality":"string"},{"id":33,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":33,"c_lowcardinality":"string"},{"id":34,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":34,"c_lowcardinality":"string"},{"id":35,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,
"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":35,"c_lowcardinality":"string"},{"id":36,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":36,"c_lowcardinality":"string"},{"id":37,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":37,"c_lowcardinality":"string"},{"id":38,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":38,"c_lowcardinality":"string"},{"id":39,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":39,"c_lowcardinality":"string"},{"id":40,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":40,"c_lowcardinality":"string"},{"id":41,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":41,"c_lowcardinality":"string"},{"id":42,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":42,"c_lowcardinality":"string"},{"id":43,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":43,"c_lowcardinality":"string"},{"id":44,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":44,"c_lowcardinality":"string"},{"id":45,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":45,"c_lowcardinality":"string"},{"id":46,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":46,"c_lowcardinality":"string"},{"id":47,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_
int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":47,"c_lowcardinality":"string"},{"id":48,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":48,"c_lowcardinality":"string"},{"id":49,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":49,"c_lowcardinality":"string"},{"id":50,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":50,"c_lowcardinality":"string"},{"id":51,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":51,"c_lowcardinality":"string"},{"id":52,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":52,"c_lowcardinality":"string"},{"id":53,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":53,"c_lowcardinality":"string"},{"id":54,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":54,"c_lowcardinality":"string"},{"id":55,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":55,"c_lowcardinality":"string"},{"id":56,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":56,"c_lowcardinality":"string"},{"id":57,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":57,"c_lowcardinality":"string"},{"id":58,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":58,"c_lowcardinality":"string"},{"id":59,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"
c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":59,"c_lowcardinality":"string"},{"id":60,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":60,"c_lowcardinality":"string"},{"id":61,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":61,"c_lowcardinality":"string"},{"id":62,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":62,"c_lowcardinality":"string"},{"id":63,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":63,"c_lowcardinality":"string"},{"id":64,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":64,"c_lowcardinality":"string"},{"id":65,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":65,"c_lowcardinality":"string"},{"id":66,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":66,"c_lowcardinality":"string"},{"id":67,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":67,"c_lowcardinality":"string"},{"id":68,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":68,"c_lowcardinality":"string"},{"id":69,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":69,"c_lowcardinality":"string"},{"id":70,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":70,"c_lowcardinality":"string"},{"id":71,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_floa
t":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":71,"c_lowcardinality":"string"},{"id":72,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":72,"c_lowcardinality":"string"},{"id":73,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":73,"c_lowcardinality":"string"},{"id":74,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":74,"c_lowcardinality":"string"},{"id":75,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":75,"c_lowcardinality":"string"},{"id":76,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":76,"c_lowcardinality":"string"},{"id":77,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":77,"c_lowcardinality":"string"},{"id":78,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":78,"c_lowcardinality":"string"},{"id":79,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":79,"c_lowcardinality":"string"},{"id":80,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":80,"c_lowcardinality":"string"},{"id":81,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":81,"c_lowcardinality":"string"},{"id":82,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":82,"c_lowcardinality":"string"},{"id":83,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1]
,"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":83,"c_lowcardinality":"string"},{"id":84,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":84,"c_lowcardinality":"string"},{"id":85,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":85,"c_lowcardinality":"string"},{"id":86,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":86,"c_lowcardinality":"string"},{"id":87,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":87,"c_lowcardinality":"string"},{"id":88,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":88,"c_lowcardinality":"string"},{"id":89,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":89,"c_lowcardinality":"string"},{"id":90,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":90,"c_lowcardinality":"string"},{"id":91,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":91,"c_lowcardinality":"string"},{"id":92,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":92,"c_lowcardinality":"string"},{"id":93,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":93,"c_lowcardinality":"string"},{"id":94,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":94,"c_lowcardinality":"string"},{"id":95,"c_array_string":["string"],"c_a
rray_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":95,"c_lowcardinality":"string"},{"id":96,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":96,"c_lowcardinality":"string"},{"id":97,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":97,"c_lowcardinality":"string"},{"id":98,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":98,"c_lowcardinality":"string"},{"id":99,"c_array_string":["string"],"c_array_short":[1],"c_array_int":[1],"c_array_long":[1],"c_array_float":[1.1],"c_array_double":[1.1],"c_string":"string","c_int8":1,"c_int16":1,"c_int32":1,"c_int64":1,"c_float32":1.1,"c_float64":1.1,"c_decimal":1.1,"c_nullable":99,"c_lowcardinality":"string"}]' + format: json + interval.per.row: 10ms + repeat.count: -1 + json.ignore.parse.errors: false + + +sinks: + clickhouse_sink: + type: clickhouse + properties: + host: clickhouse:9000 + table: sink_table + connection.database: default + batch.size: 100 + batch.byte.size: 200MB + batch.interval: 1s + connection.user: ee9b0016824d59c8c191aa9633e4b61e + connection.password: ee9b0016824d59c8c191aa9633e4b61e + +application: # [object] Define job configuration + env: + name: example-inline-to-clickhouse + parallelism: 1 + shade.identifier: aes + pipeline: + object-reuse: true + topology: + - name: inline_source + downstream: [ clickhouse_sink ] + - name: clickhouse_sink + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/clickhouse_test_sql.conf b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/clickhouse_test_sql.conf new file mode 100644 index 0000000..f132795 --- /dev/null +++ b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/clickhouse_test_sql.conf @@ -0,0 +1,81 @@ +source_table = """ +set allow_experimental_geo_types = 1; +create table if not exists `default`.source_table( + `id` Int64, + `c_array_string` Array(String), + `c_array_short` Array(Int16), + `c_array_int` Array(Int32), + `c_array_long` Array(Int64), + `c_array_float` Array(Float32), + `c_array_double` Array(Float64), + `c_string` String, + `c_int8` Int8, + `c_int16` Int16, + `c_int32` Int32, + `c_int64` Int64, + `c_float32` Float32, + `c_float64` Float64, + `c_decimal` Decimal(9,4), + `c_nullable` Nullable(Int32), + `c_lowcardinality` LowCardinality(String) +)engine=Memory; +""" + +sink_table = """ +create table if not exists `default`.sink_table( + `id` Int64, + `c_array_string` Array(String), + `c_array_short` Array(Int16), + `c_array_int` Array(Int32), + `c_array_long` Array(Int64), + `c_array_float` Array(Float32), + `c_array_double` Array(Float64), + `c_string` String, + `c_int8` Int8, + `c_int16` Int16, + `c_int32` Int32, + `c_int64` Int64, + `c_float32` Float32, + `c_float64` Float64, + 
`c_decimal` Decimal(9,4), + `c_nullable` Nullable(Int32), + `c_lowcardinality` LowCardinality(String) +)engine=Memory; +""" + +insert_sql = """ +insert into `default`.source_table +( + `id`, + `c_array_string`, + `c_array_short`, + `c_array_int`, + `c_array_long`, + `c_array_float`, + `c_array_double`, + `c_string`, + `c_int8`, + `c_int16`, + `c_int32`, + `c_int64`, + `c_float32`, + `c_float64`, + `c_decimal`, + `c_nullable`, + `c_lowcardinality` +) +values +(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) +""" + +compare_sql = """ +select + %s + from ( + select * from default.source_table +union all + select * from default.sink_table + ) +group by %s +having count(*) < 2 +""" \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/init-clickhouse.sql b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/init-clickhouse.sql new file mode 100644 index 0000000..fd9daac --- /dev/null +++ b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/init-clickhouse.sql @@ -0,0 +1,4 @@ +show databases; +-- ALTER USER default IDENTIFIED WITH plaintext_password BY 'testuser'; +CREATE USER testuser IDENTIFIED WITH plaintext_password BY 'testuser'; +GRANT ALL ON *.* TO testuser; \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/users.xml b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/users.xml new file mode 100644 index 0000000..86a590d --- /dev/null +++ b/groot-tests/test-e2e-connector-clickhouse/src/test/resources/init/users.xml @@ -0,0 +1,29 @@ + + + + + default + 1 + 1 + 1 + 1 + + ::/0 + + + ALTER TABLE ON *.* + CREATE USER ON *.* + GRANT ON *.* + + + + + + + 10000000000 + 1 + random + 8 + + + diff --git a/groot-tests/test-e2e-connector-kafka/pom.xml b/groot-tests/test-e2e-connector-kafka/pom.xml new file mode 100644 index 0000000..5f98746 --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/pom.xml @@ -0,0 +1,63 @@ + + + 4.0.0 + + com.geedgenetworks + groot-tests + ${revision} + + + test-e2e-connector-kafka + Groot : Tests : E2E : Connector : Kafka + + + + + com.geedgenetworks + test-e2e-common + ${project.version} + test-jar + test + + + + org.testcontainers + kafka + ${testcontainer.version} + test + + + + com.geedgenetworks + connector-kafka + ${project.version} + test + + + + org.xerial.snappy + snappy-java + test + + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 9 + 9 + + + + + + + \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-kafka/src/test/java/com/geedgenetworks/test/e2e/connector/kafka/KafkaIT.java b/groot-tests/test-e2e-connector-kafka/src/test/java/com/geedgenetworks/test/e2e/connector/kafka/KafkaIT.java new file mode 100644 index 0000000..a1bd86b --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/java/com/geedgenetworks/test/e2e/connector/kafka/KafkaIT.java @@ -0,0 +1,374 @@ +package com.geedgenetworks.test.e2e.connector.kafka; + +import com.geedgenetworks.formats.json.JsonSerializer; +import com.geedgenetworks.api.connector.type.StructType; +import com.geedgenetworks.api.connector.type.Types; +import com.geedgenetworks.test.e2e.common.TestResource; +import com.geedgenetworks.test.e2e.common.TestSuiteBase; +import com.geedgenetworks.test.e2e.common.container.TestContainer; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; +import com.geedgenetworks.test.e2e.common.junit.DisabledOnContainer; +import com.google.common.collect.Lists; +import 
lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.consumer.*; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.serialization.ByteArrayDeserializer; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.awaitility.Awaitility; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.TestTemplate; +import org.testcontainers.containers.Container; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testcontainers.lifecycle.Startables;; +import org.testcontainers.shaded.org.apache.commons.lang3.RandomStringUtils; +import org.testcontainers.utility.DockerImageName; +import org.testcontainers.utility.DockerLoggerFactory; +import org.testcontainers.utility.MountableFile; + +import java.io.IOException; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; + +import static org.awaitility.Awaitility.await; + +@Slf4j +@DisabledOnContainer( + value = {TestContainerId.FLINK_1_17}, + disabledReason = "Override TestSuiteBase @DisabledOnContainer") +public class KafkaIT extends TestSuiteBase implements TestResource { + + private KafkaContainer kafkaContainer; + + private static final String KAFKA_IMAGE_NAME = "confluentinc/cp-kafka:7.4.0"; + private static final String KAFKA_HOST = "kafkaCluster"; + private KafkaProducer producer; + private static final String DEFAULT_TEST_TOPIC_SOURCE = "test_topic_source"; + private static final String DEFAULT_TEST_TOPIC_CONSUME_GROUP = "test-consume-group"; + + @Override + @BeforeAll + public void startUp() { + kafkaContainer = new KafkaContainer(DockerImageName.parse(KAFKA_IMAGE_NAME)) + .withNetwork(NETWORK) + .withNetworkAliases(KAFKA_HOST) + .withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") + .withEnv("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1") + .withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "PLAINTEXT:SASL_PLAINTEXT,BROKER:SASL_PLAINTEXT") + .withEnv("KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL", "PLAIN") + .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_SASL_ENABLED_MECHANISMS", "PLAIN") + .withEnv("KAFKA_LISTENER_NAME_BROKER_SASL_ENABLED_MECHANISMS", "PLAIN") + // .withEnv("KAFKA_AUTHORIZER_CLASS_NAME", "kafka.security.authorizer.AclAuthorizer") + .withEnv("KAFKA_SUPER_USERS", "User:admin") + .withEnv("KAFKA_OPTS", "-Djava.security.auth.login.config=/etc/kafka/kafka_server_jaas.conf") + .withCopyFileToContainer(MountableFile.forClasspathResource("kafka_server_jaas.conf"), "/etc/kafka/kafka_server_jaas.conf") + .withCopyFileToContainer(MountableFile.forClasspathResource("kafka_client_jass_cli.properties"), "/etc/kafka/kafka_client_jass_cli.properties") + .withLogConsumer(new Slf4jLogConsumer(DockerLoggerFactory.getLogger(KAFKA_IMAGE_NAME))); + Startables.deepStart(Stream.of(kafkaContainer)).join(); + log.info("Kafka container started successfully"); + Awaitility.given() + .ignoreExceptions() + .atLeast(100, TimeUnit.MILLISECONDS) + 
.pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(180, TimeUnit.SECONDS) + .untilAsserted(this::initKafkaProducer); + + log.info("Write 100 records to topic test_topic_source"); + generateTestData(DEFAULT_TEST_TOPIC_SOURCE,0, 100); + + + } + + @TestTemplate + public void testKafkaAsSourceConsume(TestContainer container) { + generateTestData("test_topic_json", 0, 10); + CompletableFuture.supplyAsync( + () -> { + try { + return container.executeJob("/kafka_source.yaml"); + } catch (Exception e) { + log.error("Commit task exception :" + e.getMessage()); + throw new RuntimeException(e); + } + }); + + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + String logs = container.getServerLogs(); + Assertions.assertEquals(StringUtils.countMatches(logs, "PrintSinkFunction"), 10); + }); + } + + @TestTemplate + public void testKafkaAsSourceConsumeErrorSchema(TestContainer container) { + generateTestData("test_topic_error_json", 0, 10); + + CompletableFuture.supplyAsync( + () -> { + try { + Container.ExecResult execResult = container.executeJob("/kafka_source_error_schema.yaml"); + Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); + return execResult; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + String logs = container.getServerLogs(); + Assertions.assertTrue(StringUtils.contains(logs, "NumberFormatException")); + }); + } + + @TestTemplate + public void testKafkaAsSink(TestContainer container) throws IOException, InterruptedException { + CompletableFuture.supplyAsync( + () -> { + try { + Container.ExecResult execResult = container.executeJob("/kafka_sink.yaml"); + Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); + return execResult; + } catch (Exception e) { + log.error("Commit task exception :" + e.getMessage()); + throw new RuntimeException(e); + } + }); + + + List data = Lists.newArrayList(); + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + data.addAll(getKafkaConsumerListData("test_sink_topic", null)); + Assertions.assertEquals(10, data.size()); // Check if all 10 records are consumed + }); + + } + + @TestTemplate + public void testKafkaAsSinkProducerQuota(TestContainer container) throws IOException, InterruptedException { + //Create topic with 3 partitions + executeShell("kafka-topics --create --topic SESSION-RECORD-QUOTA-TEST --bootstrap-server kafkaCluster:9092 --partitions 1 --replication-factor 1 --command-config /etc/kafka/kafka_client_jass_cli.properties"); + //Set producer quota to 2KB/s + executeShell("kafka-configs --bootstrap-server kafkaCluster:9092 --alter --add-config 'producer_byte_rate=2048' --entity-type users --entity-name admin --entity-type clients --entity-name SESSION-RECORD-QUOTA-TEST --command-config /etc/kafka/kafka_client_jass_cli.properties "); + + CompletableFuture.supplyAsync( + () -> { + try { + Container.ExecResult execResult = container.executeJob("/kafka_producer_quota.yaml"); + Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); + return execResult; + } catch (Exception e) { + log.error("Commit task exception :" + e.getMessage()); + throw new RuntimeException(e); + } + }); + + List data = Lists.newArrayList(); + await().atMost(600000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + data.addAll(getKafkaConsumerListData("SESSION-RECORD-QUOTA-TEST", "test-consume-group-quota"+ RandomStringUtils.randomAlphabetic(5))); + 
Assertions.assertTrue(StringUtils.contains(container.getServerLogs(), "TimeoutException") && data.size()>100); + }); + + } + + + + @TestTemplate + public void testKafkaAsSinkHandleErrorJsonFormat(TestContainer container) throws IOException, InterruptedException { + CompletableFuture. supplyAsync( + () -> { + try { + Container.ExecResult execResult = container.executeJob("/kafka_sink_handle_error_json_format.yaml"); + Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); + return execResult; + } catch (Exception e) { + log.error("Commit task exception :" + e.getMessage()); + throw new RuntimeException(e); + } + }); + + + List data = Lists.newArrayList(); + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + data.addAll(getKafkaConsumerListData("test_handle_error_json_format_topic", null)); + Assertions.assertTrue(StringUtils.contains(container.getServerLogs(), "UnsupportedOperationException")); + Assertions.assertEquals(0, data.size()); + }); + + + + } + + @TestTemplate + public void testKafkaSinkSkipErrorJsonFormat(TestContainer container) throws IOException, InterruptedException { + CompletableFuture.supplyAsync( + () -> { + try { + Container.ExecResult execResult = container.executeJob("/kafka_sink_skip_error_json_format.yaml"); + Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); + return execResult; + } catch (Exception e) { + log.error("Commit task exception :" + e.getMessage()); + throw new RuntimeException(e); + } + }); + + + List data = Lists.newArrayList(); + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + data.addAll(getKafkaConsumerListData("test_skip_error_json_format_topic",null)); + Assertions.assertTrue(StringUtils.contains(container.getServerLogs(), "NullPointerException")); + Assertions.assertEquals(0, data.size()); + }); + } + + private void generateTestData(String topic, int start, int end) { + StructType dataType = Types.parseStructType("id: int, client_ip: string, server_ip: string, flag: string"); + JsonSerializer serializer = new JsonSerializer(dataType); + for (int i = start; i < end; i++) { + Map row = Map + .of("id", i, + "client_ip", "192.168.40.12", + "server_ip", "8.8.8.8" , + "flag", Boolean.FALSE.booleanValue()); + ProducerRecord record = + new ProducerRecord<>(topic, serializer.serialize(row)); + producer.send(record); + } + + } + + + @AfterAll + @Override + public void tearDown() throws Exception { + if (producer != null) { + producer.close(); + } + if (kafkaContainer != null) { + kafkaContainer.close(); + } + + } + + private void initKafkaProducer() { + Properties properties = new Properties(); + String bootstrapServers = kafkaContainer.getBootstrapServers(); + properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); + properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); + properties.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); + properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); + properties.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin\";"); + producer = new KafkaProducer<>(properties); + } + + private Properties kafkaConsumerConfig(String consumeGroup) { + Properties properties = new Properties(); + properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers()); + 
properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumeGroup); + properties.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); + properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); + properties.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin\";"); + properties.put( + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, + OffsetResetStrategy.EARLIEST.toString().toLowerCase()); + properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); + properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); + return properties; + } + + private Properties kafkaByteConsumerConfig() { + Properties props = new Properties(); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers()); + props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consume-group"); + props.put( + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, + OffsetResetStrategy.EARLIEST.toString().toLowerCase()); + props.setProperty( + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + ByteArrayDeserializer.class.getName()); + props.setProperty( + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + ByteArrayDeserializer.class.getName()); + return props; + } + + private Map getKafkaConsumerData(String topicName) { + Map data = new HashMap<>(); + try (KafkaConsumer consumer = new KafkaConsumer<>(kafkaConsumerConfig(DEFAULT_TEST_TOPIC_CONSUME_GROUP))) { + consumer.subscribe(Arrays.asList(topicName)); + Map offsets = + consumer.endOffsets(Arrays.asList(new TopicPartition(topicName, 0))); + Long endOffset = offsets.entrySet().iterator().next().getValue(); + Long lastProcessedOffset = -1L; + + do { + ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); + for (ConsumerRecord record : records) { + if (lastProcessedOffset < record.offset()) { + data.put(record.key(), record.value()); + } + lastProcessedOffset = record.offset(); + } + } while (lastProcessedOffset < endOffset - 1); + } + return data; + } + + private List getKafkaConsumerListData(String topicName, String consumeGroup) { + List data = new ArrayList<>(); + consumeGroup = StringUtils.isBlank(consumeGroup) ? 
DEFAULT_TEST_TOPIC_CONSUME_GROUP : consumeGroup; + try (KafkaConsumer consumer = new KafkaConsumer<>(kafkaConsumerConfig(consumeGroup))) { + consumer.subscribe(Arrays.asList(topicName)); + Map offsets = + consumer.endOffsets(Arrays.asList(new TopicPartition(topicName, 0))); + Long endOffset = offsets.entrySet().iterator().next().getValue(); + Long lastProcessedOffset = -1L; + + do { + ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); + for (ConsumerRecord record : records) { + if (lastProcessedOffset < record.offset()) { + data.add(record.value()); + } + lastProcessedOffset = record.offset(); + } + } while (lastProcessedOffset < endOffset - 1); + } + return data; + } + + private void executeShell(String command) { + try { + Container.ExecResult result = kafkaContainer.execInContainer("/bin/sh", "-c", command); + log.info("Execute shell command result: {},{}", result.getStdout(), result.getStderr()); + + } catch (Exception e) { + log.error("Execute shell command error: {}", e.getMessage()); + } + } + +} diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_client_jass_cli.properties b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_client_jass_cli.properties new file mode 100644 index 0000000..986cdb9 --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_client_jass_cli.properties @@ -0,0 +1,3 @@ +security.protocol=SASL_PLAINTEXT +sasl.mechanism=PLAIN +sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_producer_quota.yaml b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_producer_quota.yaml new file mode 100644 index 0000000..8c2ad8d --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_producer_quota.yaml @@ -0,0 +1,120 @@ +sources: # [object] Define connector source + mock_source: + type: mock + properties: + mock.desc.file.path: /tmp/grootstream/config/template/mock_schema/session_record_mock_desc.json + rows.per.second: 10000 + +processing_pipelines: + etl_processor: + type: projection + functions: + - function: SNOWFLAKE_ID + lookup_fields: [''] + output_fields: [log_id] + parameters: + data_center_id_num: 1 + - function: UNIX_TIMESTAMP_CONVERTER + lookup_fields: [ __timestamp ] + output_fields: [ recv_time ] + parameters: + precision: seconds + - function: SNOWFLAKE_ID + lookup_fields: [ '' ] + output_fields: [ session_id ] + parameters: + data_center_id_num: 2 + - function: EVAL + output_fields: [ ingestion_time ] + parameters: + value_expression: recv_time + + - function: DOMAIN + lookup_fields: [ http_host, ssl_sni, dtls_sni, quic_sni ] + output_fields: [ server_domain ] + parameters: + option: FIRST_SIGNIFICANT_SUBDOMAIN + + - function: ASN_LOOKUP + lookup_fields: [ client_ip ] + output_fields: [ client_asn ] + parameters: + kb_name: tsg_ip_asn + option: IP_TO_ASN + + - function: ASN_LOOKUP + lookup_fields: [ server_ip ] + output_fields: [ server_asn ] + parameters: + kb_name: tsg_ip_asn + option: IP_TO_ASN + + - function: GEOIP_LOOKUP + lookup_fields: [ client_ip ] + output_fields: [] + parameters: + kb_name: tsg_ip_location + option: IP_TO_OBJECT + geolocation_field_mapping: + COUNTRY: client_country + PROVINCE: client_super_administrative_area + CITY: client_administrative_area + + - function: GEOIP_LOOKUP + lookup_fields: [ server_ip ] + output_fields: [] + parameters: + kb_name: 
tsg_ip_location + option: IP_TO_OBJECT + geolocation_field_mapping: + COUNTRY: server_country + PROVINCE: server_super_administrative_area + CITY: server_administrative_area + + + - function: CURRENT_UNIX_TIMESTAMP + output_fields: [ processing_time ] + parameters: + precision: seconds + + +sinks: + print_sink: + type: print + properties: + mode: log_info + format: json + + kafka_sink: + type: kafka + properties: + topic: SESSION-RECORD-QUOTA-TEST + kafka.bootstrap.servers: kafkaCluster:9092 + kafka.client.id: SESSION-RECORD-QUOTA-TEST + kafka.linger.ms: 10 + kafka.request.timeout.ms: 30000 + kafka.batch.size: 262144 + kafka.buffer.memory: 134217728 + kafka.max.request.size: 10485760 + kafka.security.protocol: SASL_PLAINTEXT + kafka.sasl.mechanism: PLAIN + kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; + kafka.compression.type: snappy + format: json + json.ignore.parse.errors: false + log.failures.only: true + +application: # [object] Define job configuration + env: + name: kafka_producer_quota + parallelism: 1 + shade.identifier: default + pipeline: + object-reuse: true + topology: + - name: mock_source + downstream: [ etl_processor ] + - name: etl_processor + downstream: [ kafka_sink ] + - name: kafka_sink + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_server_jaas.conf b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_server_jaas.conf new file mode 100644 index 0000000..cb4553f --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_server_jaas.conf @@ -0,0 +1,8 @@ +KafkaServer { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="admin" + password="admin" + user_admin="admin" + user_firewall="admin" + user_olap="admin"; +}; diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink.yaml b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink.yaml new file mode 100644 index 0000000..e12e76b --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink.yaml @@ -0,0 +1,68 @@ +sources: # [object] Define connector source + inline_source: + type: inline + schema: + fields: # [array of object] Schema field projection, support read data only from specified fields. + - name: log_id + type: bigint + - name: recv_time + type: bigint + - name: server_fqdn + type: string + - name: server_domain + type: string + - name: client_ip + type: string + - name: server_ip + type: string + - name: server_asn + type: string + - name: decoded_as + type: string + - name: device_group + type: string + - name: device_tag + type: string + properties: + # + # [string] Event Data, it will be parsed to Map by the specified format. 
+ # + data: '{"recv_time": 1705565615, "log_id":206211012872372224, "tcp_rtt_ms":128,"decoded_as":"HTTP", "http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931,"tcp_c2s_ip_fragments":0,"tcp_s2c_ip_fragments":0,"tcp_c2s_rtx_pkts":0,"tcp_c2s_rtx_bytes":0,"tcp_s2c_rtx_pkts":0,"tcp_s2c_rtx_bytes":0,"tcp_c2s_o3_pkts":0,"tcp_s2c_o3_pkts":0,"tcp_c2s_lost_bytes":0,"tcp_s2c_lost_bytes":0,"flags":26418,"flags_identify_info":[100,1,100,60,150,100,1,2],"app_transition":"http.1111.test_1_1","decoded_as":"HTTP","server_fqdn":"www.ct.cn","app":"test_1_1","decoded_path":"ETHERNET.IPv4.TCP.http","fqdn_category_list":[1767],"t_vsys_id":1,"vsys_id":1,"session_id":290538039798223400,"tcp_handshake_latency_ms":41,"client_os_desc":"Windows","server_os_desc":"Linux","data_center":"center-xxg-tsgx","device_group":"group-xxg-tsgx","device_tag":"{\"tags\":[{\"tag\":\"data_center\",\"value\":\"center-xxg-tsgx\"},{\"tag\":\"device_group\",\"value\":\"group-xxg-tsgx\"}]}","device_id":"9800165603247024","sled_ip":"192.168.40.39","dup_traffic_flag":0}' + format: json + interval.per.row: 1s + repeat.count: 10 + json.ignore.parse.errors: false + + +sinks: + connector_kafka: + type: kafka + properties: + topic: test_sink_topic + kafka.bootstrap.servers: kafkaCluster:9092 + kafka.client.id: test_sink_topic + kafka.security.protocol: SASL_PLAINTEXT + kafka.sasl.mechanism: PLAIN + kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; + kafka.retries: 0 + kafka.linger.ms: 10 + kafka.request.timeout.ms: 30000 + kafka.batch.size: 262144 + kafka.buffer.memory: 134217728 + kafka.max.request.size: 10485760 + kafka.compression.type: snappy + format: json + log.failures.only: true + +application: # [object] Define job configuration + env: + name: example-inline-to-kafka + parallelism: 1 + shade.identifier: default + pipeline: + object-reuse: true + topology: + - name: inline_source + downstream: [ connector_kafka ] + - name: connector_kafka + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml new file mode 100644 index 0000000..d65157a --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml @@ -0,0 +1,67 @@ +sources: # [object] Define connector source + inline_source: + type: inline + schema: + fields: # [array of object] Schema field projection, support read data only from specified fields. 
+ - name: log_id + type: bigint + - name: recv_time + type: bigint + - name: server_fqdn + type: string + - name: server_domain + type: string + - name: client_ip + type: string + - name: server_ip + type: string + - name: server_asn + type: string + - name: decoded_as + type: string + - name: device_group + type: string + - name: device_tag + type: bigint + properties: + # + # [string] Event Data, it will be parsed to Map by the specified format. + # + data: '{"recv_time": 1705565615, "log_id":206211012872372224, "tcp_rtt_ms":128,"decoded_as":"HTTP", "http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931,"tcp_c2s_ip_fragments":0,"tcp_s2c_ip_fragments":0,"tcp_c2s_rtx_pkts":0,"tcp_c2s_rtx_bytes":0,"tcp_s2c_rtx_pkts":0,"tcp_s2c_rtx_bytes":0,"tcp_c2s_o3_pkts":0,"tcp_s2c_o3_pkts":0,"tcp_c2s_lost_bytes":0,"tcp_s2c_lost_bytes":0,"flags":26418,"flags_identify_info":[100,1,100,60,150,100,1,2],"app_transition":"http.1111.test_1_1","decoded_as":"HTTP","server_fqdn":"www.ct.cn","app":"test_1_1","decoded_path":"ETHERNET.IPv4.TCP.http","fqdn_category_list":[1767],"t_vsys_id":1,"vsys_id":1,"session_id":290538039798223400,"tcp_handshake_latency_ms":41,"client_os_desc":"Windows","server_os_desc":"Linux","data_center":"center-xxg-tsgx","device_group":"group-xxg-tsgx","device_tag":"{\"tags\":[{\"tag\":\"data_center\",\"value\":\"center-xxg-tsgx\"},{\"tag\":\"device_group\",\"value\":\"group-xxg-tsgx\"}]}","device_id":"9800165603247024","sled_ip":"192.168.40.39","dup_traffic_flag":0}' + format: json + interval.per.row: 1s + repeat.count: 10 + json.ignore.parse.errors: false + + +sinks: + connector_kafka: + type: kafka + properties: + topic: test_handle_error_json_format_topic + kafka.bootstrap.servers: kafkaCluster:9092 + kafka.security.protocol: SASL_PLAINTEXT + kafka.sasl.mechanism: PLAIN + kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; + kafka.retries: 0 + kafka.linger.ms: 10 + kafka.request.timeout.ms: 30000 + kafka.batch.size: 262144 + kafka.buffer.memory: 134217728 + kafka.max.request.size: 10485760 + kafka.compression.type: snappy + format: json + log.failures.only: true + +application: # [object] Define job configuration + env: + name: example-inline-to-kafka + parallelism: 1 + shade.identifier: default + pipeline: + object-reuse: true + topology: + - name: inline_source + downstream: [ connector_kafka ] + - name: connector_kafka + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml new file mode 100644 index 0000000..d9cb80f --- /dev/null +++ 
b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml @@ -0,0 +1,67 @@ +sources: # [object] Define connector source + inline_source: + type: inline + schema: + fields: # [array of object] Schema field projection, support read data only from specified fields. + - name: log_id + type: bigint + - name: recv_time + type: bigint + - name: server_fqdn + type: string + - name: server_domain + type: string + - name: client_ip + type: string + - name: server_ip + type: string + - name: server_asn + type: string + - name: decoded_as + type: string + - name: device_group + type: string + - name: device_tag + type: bigint + properties: + # + # [string] Event Data, it will be parsed to Map by the specified format. + # + data: '{"recv_time": 1705565615, "log_id":206211012872372224, "tcp_rtt_ms":128,"decoded_as":"HTTP", "http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931,"tcp_c2s_ip_fragments":0,"tcp_s2c_ip_fragments":0,"tcp_c2s_rtx_pkts":0,"tcp_c2s_rtx_bytes":0,"tcp_s2c_rtx_pkts":0,"tcp_s2c_rtx_bytes":0,"tcp_c2s_o3_pkts":0,"tcp_s2c_o3_pkts":0,"tcp_c2s_lost_bytes":0,"tcp_s2c_lost_bytes":0,"flags":26418,"flags_identify_info":[100,1,100,60,150,100,1,2],"app_transition":"http.1111.test_1_1","decoded_as":"HTTP","server_fqdn":"www.ct.cn","app":"test_1_1","decoded_path":"ETHERNET.IPv4.TCP.http","fqdn_category_list":[1767],"t_vsys_id":1,"vsys_id":1,"session_id":290538039798223400,"tcp_handshake_latency_ms":41,"client_os_desc":"Windows","server_os_desc":"Linux","data_center":"center-xxg-tsgx","device_group":"group-xxg-tsgx","device_tag":"{\"tags\":[{\"tag\":\"data_center\",\"value\":\"center-xxg-tsgx\"},{\"tag\":\"device_group\",\"value\":\"group-xxg-tsgx\"}]}","device_id":"9800165603247024","sled_ip":"192.168.40.39","dup_traffic_flag":0}' + format: json + interval.per.row: 1s + repeat.count: 10 + json.ignore.parse.errors: true + + +sinks: + connector_kafka: + type: kafka + properties: + topic: test_skip_error_json_format_topic + kafka.bootstrap.servers: kafkaCluster:9092 + kafka.security.protocol: SASL_PLAINTEXT + kafka.sasl.mechanism: PLAIN + kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; + kafka.retries: 0 + kafka.linger.ms: 10 + kafka.request.timeout.ms: 30000 + kafka.batch.size: 262144 + kafka.buffer.memory: 134217728 + kafka.max.request.size: 10485760 + kafka.compression.type: snappy + format: json + log.failures.only: true + +application: # [object] Define job configuration + env: + name: example-inline-to-kafka + parallelism: 1 + shade.identifier: default + pipeline: + object-reuse: true + topology: + - name: inline_source + downstream: [ connector_kafka ] + - name: connector_kafka + downstream: [] \ No newline at end of file diff --git 
a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source.yaml b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source.yaml new file mode 100644 index 0000000..3403ab9 --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source.yaml @@ -0,0 +1,41 @@ +sources: + kafka_source: + type : kafka + schema: + fields: # [array of object] Schema field projection, support read data only from specified fields. + - name: client_ip + type: string + - name: server_ip + type: string + properties: # [object] Kafka source properties + topic: test_topic_json + kafka.bootstrap.servers: kafkaCluster:9092 + kafka.session.timeout.ms: 60000 + kafka.max.poll.records: 3000 + kafka.max.partition.fetch.bytes: 31457280 + kafka.security.protocol: SASL_PLAINTEXT + kafka.sasl.mechanism: PLAIN + kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; + kafka.group.id: test_topic_json_group + kafka.auto.offset.reset: earliest + format: json + +sinks: # [object] Define connector sink + print_sink: + type: print + properties: + mode: log_warn + format: json + +application: # [object] Define job configuration + env: + name: example-kafka-to-print + parallelism: 1 + shade.identifier: default + pipeline: + object-reuse: true + topology: + - name: kafka_source + downstream: [print_sink] + - name: print_sink + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source_error_schema.yaml b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source_error_schema.yaml new file mode 100644 index 0000000..1016560 --- /dev/null +++ b/groot-tests/test-e2e-connector-kafka/src/test/resources/kafka_source_error_schema.yaml @@ -0,0 +1,42 @@ +sources: + kafka_source: + type : kafka + properties: # [object] Kafka source properties + topic: test_topic_error_json + kafka.bootstrap.servers: kafkaCluster:9092 + kafka.security.protocol: SASL_PLAINTEXT + kafka.sasl.mechanism: PLAIN + kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; + kafka.session.timeout.ms: 60000 + kafka.max.poll.records: 3000 + kafka.max.partition.fetch.bytes: 31457280 + kafka.group.id: test_topic_error_json_group + kafka.auto.offset.reset: earliest + format: json + +sinks: # [object] Define connector sink + print_sink: + type: print + schema: + fields: # [array of object] Schema field projection, support read data only from specified fields. 
+ - name: client_ip + type: string + - name: server_ip + type: bigint + properties: + mode: log_warn + format: json + json.ignore.parse.errors: false + + +application: # [object] Define job configuration + env: + name: example-kafka-to-print + parallelism: 1 + pipeline: + object-reuse: true + topology: + - name: kafka_source + downstream: [print_sink] + - name: print_sink + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-core/pom.xml b/groot-tests/test-e2e-core/pom.xml new file mode 100644 index 0000000..25e0cf9 --- /dev/null +++ b/groot-tests/test-e2e-core/pom.xml @@ -0,0 +1,49 @@ + + + 4.0.0 + + com.geedgenetworks + groot-tests + ${revision} + + + test-e2e-core + Groot : Tests : E2E : Core + + + + + + + + com.geedgenetworks + test-e2e-common + ${project.version} + test-jar + test + + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + false + + + + + test-jar + + + + + + + + \ No newline at end of file diff --git a/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/EnvParameterIT.java b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/EnvParameterIT.java new file mode 100644 index 0000000..1a24e6f --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/EnvParameterIT.java @@ -0,0 +1,198 @@ +package com.geedgenetworks.test.e2e.core; + +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.TypeReference; +import com.alibaba.nacos.client.naming.utils.CollectionUtils; +import com.geedgenetworks.test.e2e.common.TestSuiteBase; +import com.geedgenetworks.test.e2e.common.container.AbstractTestFlinkContainer; +import com.geedgenetworks.test.e2e.common.container.ContainerExtendedFactory; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; +import com.geedgenetworks.test.e2e.common.junit.DisabledOnContainer; +import com.geedgenetworks.test.e2e.common.junit.TestContainerExtension; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.TestTemplate; +import org.testcontainers.containers.Container; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.awaitility.Awaitility.await; + +@Slf4j +@DisabledOnContainer( + value = {TestContainerId.FLINK_1_17}, + type = {}, + disabledReason = "only flink adjusts the parameter configuration rules") +public class EnvParameterIT extends TestSuiteBase { + @TestContainerExtension + protected final ContainerExtendedFactory extendedFactory = + container -> { + Container.ExecResult extraCommands = + container.execInContainer( + "bash", + "-c", + "mkdir -p /tmp/grootstream && chown -R flink /tmp/grootstream"); + Assertions.assertEquals(0, extraCommands.getExitCode(), extraCommands.getStderr()); + }; + + @TestTemplate + public void testGeneralEnvParameter(AbstractTestFlinkContainer container) + throws IOException, InterruptedException { + genericTest( + "/test_env_parameter_inline_to_print.yaml", container); + } + + + public void genericTest(String configPath, AbstractTestFlinkContainer container) + throws IOException, InterruptedException { + CompletableFuture.supplyAsync( + () -> { + try { + return container.executeJob(configPath); + } catch (Exception e) { + log.error("Commit task exception :" + e.getMessage()); + throw new RuntimeException(e); + } + }); + // 
wait obtain job id + AtomicReference jobId = new AtomicReference<>(); + await().atMost(300000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + Map jobInfo = JSON.parseObject(container.executeJobManagerInnerCommand( + "curl http://localhost:8081/jobs/overview"), new TypeReference>() { + }); + List> jobs = + (List>) jobInfo.get("jobs"); + if (!CollectionUtils.isEmpty(jobs)) { + jobId.set(jobs.get(0).get("jid").toString()); + } + Assertions.assertNotNull(jobId.get()); + }); + + // obtain job info + AtomicReference> jobInfoReference = new AtomicReference<>(); + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + Map jobInfo = JSON.parseObject( container.executeJobManagerInnerCommand( + String.format( + "curl http://localhost:8081/jobs/%s", + jobId.get())), new TypeReference>() { + }); + + // wait the job initialization is complete and enters the Running state + if (null != jobInfo && "RUNNING".equals(jobInfo.get("state"))) { + jobInfoReference.set(jobInfo); + } + Assertions.assertNotNull(jobInfoReference.get()); + }); + Map jobInfo = jobInfoReference.get(); + + // obtain execution configuration + Map jobConfig = JSON.parseObject(container.executeJobManagerInnerCommand( + String.format( + "curl http://localhost:8081/jobs/%s/config", jobId.get())), new TypeReference>() { + }); + + Map executionConfig = + (Map) jobConfig.get("execution-config"); + + // obtain checkpoint configuration + Map checkpointConfig = + JSON.parseObject(container.executeJobManagerInnerCommand( + String.format( + "curl http://localhost:8081/jobs/%s/checkpoints/config", jobId.get())), new TypeReference>() { + }); + + // obtain checkpoint storage + AtomicReference> completedCheckpointReference = new AtomicReference<>(); + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + Map checkpointsInfo = + JSON.parseObject(container.executeJobManagerInnerCommand( + String.format( + "curl http://localhost:8081/jobs/%s/checkpoints", jobId.get())), new TypeReference>() { + }); + Map latestCheckpoint = + (Map) checkpointsInfo.get("latest"); + // waiting for at least one checkpoint trigger + if (null != latestCheckpoint) { + completedCheckpointReference.set( + (Map) latestCheckpoint.get("completed")); + Assertions.assertNotNull(completedCheckpointReference.get()); + } + }); + /** + * adjust the configuration of this {@link + * com.geedgenetworks.bootstrap.execution.ExecutionConfigKeyName} to use the 'flink.' 
and the + * flink parameter name, and check whether the configuration takes effect + */ + // PARALLELISM + int parallelism = (int) executionConfig.get("job-parallelism"); + Assertions.assertEquals(1, parallelism); + + // MAX_PARALLELISM + int maxParallelism = (int) jobInfo.get("maxParallelism"); + Assertions.assertEquals(5, maxParallelism); + + // CHECKPOINT_INTERVAL + int interval = (int) checkpointConfig.get("interval"); + Assertions.assertEquals(10000, interval); + + // CHECKPOINT_MODE + String mode = checkpointConfig.get("mode").toString(); + Assertions.assertEquals("exactly_once", mode); + + // CHECKPOINT_TIMEOUT + int checkpointTimeout = (int) checkpointConfig.get("timeout"); + Assertions.assertEquals(1200000, checkpointTimeout); + + // CHECKPOINT_DATA_URI + String externalPath = completedCheckpointReference.get().get("external_path").toString(); + Assertions.assertTrue(externalPath.startsWith("file:/tmp/grootstream/checkpoints")); + + // MAX_CONCURRENT_CHECKPOINTS + int maxConcurrent = (int) checkpointConfig.get("max_concurrent"); + Assertions.assertEquals(2, maxConcurrent); + + // CHECKPOINT_CLEANUP_MODE + Map externalizationMap = + (Map) checkpointConfig.get("externalization"); + boolean externalization = (boolean) externalizationMap.get("delete_on_cancellation"); + Assertions.assertTrue(externalization); + + // MIN_PAUSE_BETWEEN_CHECKPOINTS + int minPause = (int) checkpointConfig.get("min_pause"); + Assertions.assertEquals(100, minPause); + + // FAIL_ON_CHECKPOINTING_ERRORS + int tolerableFailedCheckpoints = (int) checkpointConfig.get("tolerable_failed_checkpoints"); + Assertions.assertEquals(5, tolerableFailedCheckpoints); + + // RESTART_STRATEGY / because the restart strategy is fixed-delay in config file, so don't + // check failure-rate + String restartStrategy = executionConfig.get("restart-strategy").toString(); + Assertions.assertTrue(restartStrategy.contains("fixed delay")); + + // RESTART_ATTEMPTS + Assertions.assertTrue(restartStrategy.contains("2 restart attempts")); + + // RESTART_DELAY_BETWEEN_ATTEMPTS + Assertions.assertTrue(restartStrategy.contains("fixed delay (1000 ms)")); + + // STATE_BACKEND + String stateBackend = checkpointConfig.get("state_backend").toString(); + Assertions.assertTrue(stateBackend.contains("RocksDBStateBackend")); + } + + + +} + diff --git a/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/Flink13Container.java b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/Flink13Container.java new file mode 100644 index 0000000..b47dece --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/Flink13Container.java @@ -0,0 +1,34 @@ +package com.geedgenetworks.test.e2e.core; + +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.TestInstance; +import org.testcontainers.containers.Container; + +import java.io.IOException; + +@Slf4j +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public class Flink13Container extends com.geedgenetworks.test.e2e.common.container.Flink13Container { + @Override + @BeforeAll + public void startUp() throws Exception { + super.startUp(); + log.info("The TestContainer[{}] is running.", identifier()); + } + + @Override + @AfterAll + public void tearDown() throws Exception { + super.tearDown(); + log.info("The TestContainer[{}] is closed.", identifier()); + } + + public Container.ExecResult executeGrootStreamJob(String confFile) + throws IOException, 
InterruptedException { + return executeJob(confFile); + } + + +} diff --git a/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/InlineToPrintIT.java b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/InlineToPrintIT.java new file mode 100644 index 0000000..fadf6f3 --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/InlineToPrintIT.java @@ -0,0 +1,150 @@ +package com.geedgenetworks.test.e2e.core; + +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.TypeReference; +import com.alibaba.nacos.client.naming.utils.CollectionUtils; +import com.geedgenetworks.test.e2e.common.TestSuiteBase; +import com.geedgenetworks.test.e2e.common.container.AbstractTestFlinkContainer; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; +import com.geedgenetworks.test.e2e.common.junit.DisabledOnContainer; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.TestTemplate; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.awaitility.Awaitility.await; + +@Slf4j +@DisabledOnContainer( + value = {TestContainerId.FLINK_1_17}, + type = {}, + disabledReason = "Only flink adjusts the parameter configuration rules") +public class InlineToPrintIT extends TestSuiteBase { + + + @TestTemplate + public void testJobExecution(AbstractTestFlinkContainer container) { + CompletableFuture.supplyAsync( + () -> { + try { + return container.executeJob("/inline_to_print.yaml"); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + AtomicReference taskMangerID = new AtomicReference<>(); + + await().atMost(300000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + Map taskMangerInfo = JSON.parseObject(container.executeJobManagerInnerCommand( + "curl http://localhost:8081/taskmanagers"), new TypeReference>() { + }); + + @SuppressWarnings("unchecked") + List> taskManagers = + (List>) taskMangerInfo.get("taskmanagers"); + + if (!CollectionUtils.isEmpty(taskManagers)) { + taskMangerID.set(taskManagers.get(0).get("id").toString()); + } + Assertions.assertNotNull(taskMangerID.get()); + }); + + AtomicReference jobId = new AtomicReference<>(); + await().atMost(300000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + Map jobInfo = JSON.parseObject(container.executeJobManagerInnerCommand( + "curl http://localhost:8081/jobs/overview"), new TypeReference>() { + }); + @SuppressWarnings("unchecked") + List> jobs = + (List>) jobInfo.get("jobs"); + if (!CollectionUtils.isEmpty(jobs)) { + jobId.set(jobs.get(0).get("jid").toString()); + } + Assertions.assertNotNull(jobId.get()); + }); + + //Obtain job metrics + AtomicReference>> jobNumRestartsReference = new AtomicReference<>(); + await().atMost(60000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + Thread.sleep(5000); + String result = container.executeJobManagerInnerCommand( + String.format( + "curl http://localhost:8081/jobs/%s/metrics?get=numRestarts", jobId.get())); + List> jobNumRestartsInfo = JSON.parseObject(result, new TypeReference>>() { + }); + if (!CollectionUtils.isEmpty(jobNumRestartsInfo)) { + jobNumRestartsReference.set(jobNumRestartsInfo); + } + + Assertions.assertNotNull(jobNumRestartsReference.get()); + + }); + + + } + + @TestTemplate + public void 
testUserDefinedJobVariables(AbstractTestFlinkContainer container) { + + CompletableFuture.supplyAsync( + () -> { + try { + List variables = List.of( + "hos.bucket.name.rtp_file=cli_job_level_traffic_rtp_file_bucket", + "hos.bucket.name.http_file=cli_job_level_traffic_http_file_bucket"); + return container.executeJob("/inline_to_print.yaml", variables); + } catch (Exception e) { + log.error("Commit task exception : {} ", e.getMessage()); + throw new RuntimeException(e); + } + }); + + + await().atMost(300000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + String logs = container.getServerLogs(); + + Assertions.assertTrue(StringUtils.countMatches(logs, "cli_job_level_traffic_rtp_file_bucket/test_pcap_file") > 10); + Assertions.assertTrue(StringUtils.countMatches(logs, "cli_job_level_traffic_http_file_bucket/test_http_req_file") > 10); + // Test server_ip filter -> output logs not contains 4.4.4.4 of server_ip + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && !StringUtils.contains(logs, "\"server_ip\":\"4.4.4.4\"")); + // Test Drop function -> output logs not contains 5.5.5.5 of server_ip + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && !StringUtils.contains(logs, "\"server_ip\":\"5.5.5.5\"")); + + // Output logs contains server_asn + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"server_asn\"")); + // Output logs contains server_domain + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"server_domain\"")); + + // Output logs contains server_country + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"server_country\"")); + // Output logs contains mail_attachment_name equals 中文测试 + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"mail_attachment_name\":\"中文测试\"")); + // Test EVAL function -> output logs contains direction equals c2s + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"direction\":\"c2s\"")); + // Test JSON Extract function -> output logs contains device_group equals XXG-TSG-BJ + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "\"device_group\":\"XXG-TSG-BJ\"")); + + Assertions.assertTrue(StringUtils.containsIgnoreCase(logs, "PrintSinkFunction ") && StringUtils.contains(logs, "client_ip_list")); + + }); + + + + } + +} diff --git a/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/ProcessorIT.java b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/ProcessorIT.java new file mode 100644 index 0000000..053bad6 --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/ProcessorIT.java @@ -0,0 +1,69 @@ +package com.geedgenetworks.test.e2e.core; + +import com.alibaba.fastjson2.JSONObject; +import com.geedgenetworks.test.e2e.common.TestSuiteBase; +import com.geedgenetworks.test.e2e.common.container.AbstractTestFlinkContainer; +import com.geedgenetworks.test.e2e.common.container.ContainerUtil; +import com.geedgenetworks.test.e2e.common.container.TestContainerId; +import com.geedgenetworks.test.e2e.common.junit.DisabledOnContainer; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.Assertions; 
+import org.junit.jupiter.api.TestTemplate; + +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.regex.Pattern; + +import static org.awaitility.Awaitility.await; + +@Slf4j +@DisabledOnContainer( + value = {TestContainerId.FLINK_1_17}, + disabledReason = "Override TestSuiteBase @DisabledOnContainer") +public class ProcessorIT extends TestSuiteBase { + + @TestTemplate + public void testJobSplitProcessor(AbstractTestFlinkContainer container) { + CompletableFuture.supplyAsync( + () -> { + try { + return container.executeJob("/job_split_processor.yaml"); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + await().atMost(90000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + String logs = container.getServerLogs(); + List result = ContainerUtil.extractJsonFromServerLogs(logs); + Assertions.assertEquals(7, result.size()); + }); + + + } + + + @TestTemplate + public void testJobAggregateProcessor(AbstractTestFlinkContainer container) { + CompletableFuture.supplyAsync( + () -> { + try { + return container.executeJob("/job_aggregate_processor.yaml"); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + await().atMost(90000, TimeUnit.MILLISECONDS) + .untilAsserted( + () -> { + String logs = container.getServerLogs(); + List result = ContainerUtil.extractJsonFromServerLogs(logs); + Assertions.assertEquals(4, result.size()); + }); + } +} diff --git a/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/TestUtils.java b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/TestUtils.java new file mode 100644 index 0000000..7badb2b --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/java/com/geedgenetworks/test/e2e/core/TestUtils.java @@ -0,0 +1,21 @@ +package com.geedgenetworks.test.e2e.core; + +import lombok.extern.slf4j.Slf4j; + +import java.io.File; + +@Slf4j +public class TestUtils { + public static String getResource(String confFile) { + return System.getProperty("user.dir") + + File.separator + + "src" + + File.separator + + "test" + + File.separator + + "resources" + + File.separator + + confFile; + } + +} diff --git a/groot-tests/test-e2e-core/src/test/resources/inline_to_print.yaml b/groot-tests/test-e2e-core/src/test/resources/inline_to_print.yaml new file mode 100644 index 0000000..4e452c7 --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/resources/inline_to_print.yaml @@ -0,0 +1,232 @@ +sources: + inline_source: + type: inline + properties: + data: [{"tcp_rtt_ms":128,"decoded_as":"DNS","rtp_pcap_path":"test_pcap_file", "security_rule_id_list": [1,10,100,300], "http_request_body":"test_http_req_file","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","ssl_sni":"www.ct.cn", "http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"flags":8192, "address_type":4,"mail_subject":"中文标题测试","mail_attachment_name":"5Lit5paH5rWL6K+V","mail_attachment_name_charset": "utf8","device_tag": "{\"tags\":[{\"tag\":\"data_center\",\"value\":\"XXG-TSG-BJ\"},{\"tag\":\"device_group\",\"value\":\"XXG-TSG-BJ\"}]}", 
"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.abc.cn","http_url":"www.cabc.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.198","mail_subject":"中文标题测试","server_ip":"4.4.4.4","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","mail_subject":"english subject test","http_request_line":"GET / HTTP/1.1","http_host":"www.5555.com","http_url":"www.5555.com/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.1","server_ip":"5.5.5.5","client_port":42751,"server_port":53,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.6666.cn","http_url":"www.6666.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","mail_subject":"中文标题测试","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.100.1","server_ip":"6.6.6.6","client_port":42751,"server_port":53,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931}] + format: json + json.ignore.parse.errors: false + +filters: + server_ip_filter: + type: filter + properties: + expression: server_ip != '4.4.4.4' + +splits: + decoded_as_split: + type: split + rules: + - tag: http_tag + expression: decoded_as == 'HTTP' + - tag: dns_tag + expression: decoded_as == 'DNS' + + +processing_pipelines: + projection_processor: + type: projection + remove_fields: [http_request_line, http_response_line, http_response_content_type] + functions: + + - function: DROP + filter: server_ip == '5.5.5.5' + + - function: SNOWFLAKE_ID + output_fields: [ log_id ] + 
parameters: + data_center_id_num: 1 + + - function: UUID + output_fields: [ log_uuid ] + + - function: UUIDv5 + lookup_fields: [ client_ip, server_ip ] + output_fields: [ ip_uuid ] + parameters: + namespace: NAMESPACE_IP + - function: UUIDv7 + output_fields: [ log_uuid_v7 ] + + - function: ASN_LOOKUP + lookup_fields: [ server_ip ] + output_fields: [ server_asn ] + parameters: + kb_name: tsg_ip_asn + option: IP_TO_ASN + + - function: GEOIP_LOOKUP + lookup_fields: [ server_ip ] + output_fields: [ ] + parameters: + kb_name: tsg_ip_location + option: IP_TO_OBJECT + geolocation_field_mapping: + COUNTRY: server_country + PROVINCE: server_super_administrative_area + CITY: server_administrative_area + LONGITUDE: server_longitude + LATITUDE: server_latitude + ISP: server_isp + ORGANIZATION: server_organization + + - function : BASE64_ENCODE_TO_STRING + lookup_fields: [ mail_subject ] + output_fields: [ mail_subject_base64 ] + parameters: + input_type: string + + - function: BASE64_DECODE_TO_STRING + output_fields: [ mail_attachment_name ] + parameters: + value_field: mail_attachment_name + charset_field: mail_attachment_name_charset + - function: CURRENT_UNIX_TIMESTAMP + output_fields: [ current_unix_timestamp_ms ] + parameters: + precision: milliseconds + + - function: DOMAIN + lookup_fields: [ http_host, ssl_sni, quic_sni ] + output_fields: [ server_domain ] + parameters: + option: FIRST_SIGNIFICANT_SUBDOMAIN + + - function: EVAL + output_fields: [ recv_time ] + parameters: + value_expression: current_unix_timestamp_ms + + - function: EVAL + output_fields: [ direction ] + parameters: + value_expression: "(flags & 24576) == 24576 ? 'double' : ((flags & 8192) == 8192 ? 'c2s' : ((flags & 16384) == 16384 ? 's2c' : 'unknown'))" + + - function: EVAL + output_fields: [ constant_value ] + parameters: + value_expression: "'abc'" + + - function: JSON_EXTRACT + lookup_fields: [ device_tag ] + output_fields: [ device_group ] + parameters: + value_expression: $.tags[?(@.tag=='device_group')][0].value + + - function: FLATTEN + lookup_fields: [ device_tag ] + parameters: + prefix: olap + json_string_keys: [device_tag] + + - function: FROM_UNIX_TIMESTAMP + lookup_fields: [ current_unix_timestamp_ms ] + output_fields: [ current_time_str ] + parameters: + precision: milliseconds + + - function: GENERATE_STRING_ARRAY + lookup_fields: [server_ip, server_port] + output_fields: [server_ip_port] + + - function: PATH_COMBINE + lookup_fields: [ rtp_pcap_path ] + output_fields: [ rtp_pcap_path ] + parameters: + path: [ props.hos.path, props.hos.bucket.name.rtp_file, rtp_pcap_path ] + + - function: PATH_COMBINE + lookup_fields: [ http_request_body ] + output_fields: [ http_request_body ] + parameters: + path: [ props.hos.path, props.hos.bucket.name.http_file, http_request_body ] + + - function: RENAME + parameters: + rename_fields: + current_unix_timestamp_ms: processing_time_ms + rename_expression: key = string.replace_all(key,'olap.device_tag.tags','device_tags'); return key; + + - function: UNIX_TIMESTAMP_CONVERTER + lookup_fields: [ __timestamp ] + output_fields: [stat_time_minute] + parameters: + precision: minutes + + dns_table_processor: + type: table + functions: + - function: UNROLL + lookup_fields: [ security_rule_id_list ] + output_fields: [ security_rule_id ] + + dns_aggregate_processor: + type: aggregate + group_by_fields: [ decoded_as ] + window_type: tumbling_processing_time + window_size: 5 + functions: + - function: LONG_COUNT + output_fields: [ count ] + - function: COLLECT_LIST + lookup_fields: [ 
client_ip ] + output_fields: [ client_ip_list ] + + + +sinks: + global_print_sink: + type: print + properties: + format: json + mode: log_warn + dns_print_sink: + type: print + properties: + format: json + mode: log_warn + http_print_sink: + type: print + properties: + format: json + mode: log_warn + + +application: + env: + name: example-inline-to-print + parallelism: 1 + pipeline: + object-reuse: true + + properties: + hos.path: http://192.168.44.12:9098/hos + hos.bucket.name.troubleshooting_file: troubleshooting_file_bucket + hos.bucket.name.rtp_file: job_level_traffic_rtp_file_bucket + hos.bucket.name.http_file: job_level_traffic_http_file_bucket + hos.bucket.name.eml_file: job_level_traffic_eml_file_bucket + hos.bucket.name.policy_capture_file: job_level_traffic_policy_capture_file_bucket + + topology: + - name: inline_source + downstream: [server_ip_filter] + - name: server_ip_filter + downstream: [ projection_processor ] + - name: projection_processor + downstream: [ global_print_sink, decoded_as_split ] + parallelism: 2 + - name: decoded_as_split + tags: [http_tag, dns_tag] + downstream: [ http_print_sink, dns_table_processor ] + parallelism: 2 + - name: dns_table_processor + downstream: [ dns_aggregate_processor ] + parallelism: 2 + - name: dns_aggregate_processor + downstream: [ dns_print_sink ] + parallelism: 2 + - name: global_print_sink + downstream: [] + - name: http_print_sink + downstream: [] + - name: dns_print_sink + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-core/src/test/resources/job_aggregate_processor.yaml b/groot-tests/test-e2e-core/src/test/resources/job_aggregate_processor.yaml new file mode 100644 index 0000000..ebd51e3 --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/resources/job_aggregate_processor.yaml @@ -0,0 +1,76 @@ +sources: + inline_source: + type : inline + fields: # [array of object] Field List, if not set, all fields(Map) will be output. 
+ properties: # records 3 and 4 will be aggregated + data: '[{"pkts":1,"sessions":1,"log_id": 1, "recv_time":"1724925692000","client_ips":["192.168.0.2","192.168.0.1"],"client_ip":"192.168.0.2","server_ip":"2600:1015:b002::"},{"pkts":1,"sessions":1,"decoded_as":null,"log_id": 1, "recv_time":"1724925692000","client_ips":["192.168.0.2","192.168.0.1"], "client_ip":"192.168.0.1","server_ip":"2600:1015:b002::"},{"pkts":2,"sessions":1,"decoded_as":"HTTP","log_id": 2, "recv_time":"1724925692000","client_ips":["192.168.0.2","192.168.0.3"], "client_ip":"192.168.0.2","server_ip":"2600:1015:b002::"},{"sessions":1,"decoded_as":"DNS","log_id": 2, "recv_time":"1724925692000","client_ips":["192.168.0.2","192.168.0.1"], "client_ip":"192.168.0.2","pkts":3,"server_ip":"2600:1015:b002::"},{"sessions":1,"decoded_as":"DNS","log_id": 1,"client_ips":["192.168.0.2","192.168.0.3"], "recv_time":"1724936692000", "client_ip":"192.168.0.2","pkts":4,"server_ip":"2600:1015:b002::"},{"sessions":1,"decoded_as":"HTTP","log_id": 1, "recv_time":"1724937692000", "client_ip":"192.168.0.2","pkts":5,"server_ip":"2600:1015:b002::"}]' + interval.per.row: 1s # optional + repeat.count: 1 # optional + format: json + json.ignore.parse.errors: false + watermark_timestamp: recv_time + watermark_timestamp_unit: ms + watermark_lag: 10 + +sinks: + print_sink: + type: print + properties: + format: json + mode: log_warn + +postprocessing_pipelines: + + aggregate_processor: + type: aggregate + group_by_fields: [decoded_as] + window_type: tumbling_event_time # tumbling_event_time,sliding_processing_time,sliding_event_time + window_size: 5 + window_timestamp_field: test_time + mini_batch: false + functions: + - function: NUMBER_SUM + lookup_fields: [ sessions ] + - function: MEAN + lookup_fields: [ pkts ] + - function: MAX + lookup_fields: [ pkts ] + output_fields: [ pktsmax ] + - function: MIN + lookup_fields: [ pkts ] + output_fields: [ pktsmin ] + - function: LONG_COUNT + output_fields: [ count ] + - function: COLLECT_LIST + lookup_fields: [ client_ip ] + output_fields: [ client_ip_list ] + - function: COLLECT_SET + lookup_fields: [ server_ip ] + output_fields: [ server_ip_set ] + - function: FIRST_VALUE + lookup_fields: [ log_id ] + output_fields: [ log_id_first ] + - function: LAST_VALUE + lookup_fields: [ log_id ] + output_fields: [ log_id_last ] + - function: COLLECT_SET + lookup_fields: [ client_ips ] + output_fields: [ client_ips_set ] + parameters: + collect_type: array +application: # [object] Application Configuration + env: # [object] Environment Variables + name: groot-stream-job # [string] Job Name + pipeline: + object-reuse: true # [boolean] Object Reuse, default is false + topology: # [array of object] Node List. It will be used to build the data flow for the job DAG graph. + - name: inline_source # [string] Node Name, must be unique. It will be used as the name of the corresponding Flink operator, e.g. kafka_source for the SOURCE processor type. + parallelism: 1 # [number] Operator-Level Parallelism.
+ downstream: [ aggregate_processor ] + - name: aggregate_processor + parallelism: 1 + downstream: [ print_sink ] + - name: print_sink + parallelism: 1 + + diff --git a/groot-tests/test-e2e-core/src/test/resources/job_split_processor.yaml b/groot-tests/test-e2e-core/src/test/resources/job_split_processor.yaml new file mode 100644 index 0000000..9dc68ec --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/resources/job_split_processor.yaml @@ -0,0 +1,99 @@ +sources: + inline_source: + type : inline + fields: # [array of object] Field List, if not set, all fields(Map) will be output. + properties: + data: '[{"sessions":1,"mail_attachment_name_charset":"GBK","mail_attachment_name":"aGVsbG8=","packet_capture_file":"test","ssl_sni":"www.google.com","decoded_as":"BASE","ssl_san":"www.google.com","__timestamp":1705568517095,"client_ip":"255.255.255.255","server_ip":"2600:1015:b002::"},{"sessions":1,"decoded_as":"HTTP","log_id": 1, "recv_time":"111", "client_ip":"192.168.0.2","server_ip":"2600:1015:b002::"},{"sessions":1,"decoded_as":"DNS","log_id": 1, "recv_time":"111", "client_ip":"192.168.0.2","server_ip":"2600:1015:b002::"},{"sessions":1,"decoded_as":"DNS","log_id": 1, "recv_time":"111", "client_ip":"192.168.0.2","server_ip":"2600:1015:b002::"}]' + interval.per.row: 1s # optional + repeat.count: 1 # optional + format: json + json.ignore.parse.errors: false +sinks: + print_sink: + type: print + properties: + format: json + mode: log_warn +splits: + test_split: + type: split + rules: + - tag: http_tag + expression: decoded_as == 'HTTP' + - tag: dns_tag + expression: decoded_as == 'DNS' + +postprocessing_pipelines: + pre_etl_processor: # [object] Processing Pipeline + type: projection + remove_fields: [fields,tags] + output_fields: + functions: # [array of object] Function List + + - function: FLATTEN + lookup_fields: [ fields,tags ] + output_fields: [ ] + parameters: + #prefix: "" + depth: 3 + # delimiter: "." + + - function: RENAME + lookup_fields: [ '' ] + output_fields: [ '' ] + filter: + parameters: + # parent_fields: [tags] + # rename_fields: + # tags: tags + rename_expression: key =string.replace_all(key,'tags.','');key =string.replace_all(key,'fields.','');return key; + + + - function: UNIX_TIMESTAMP_CONVERTER + lookup_fields: [ timestamp_ms ] + output_fields: [ recv_time ] + parameters: + precision: seconds + interval: 300 + # + + aggregate_processor: + type: aggregate + group_by_fields: [decoded_as] + window_type: tumbling_processing_time # tumbling_event_time,sliding_processing_time,sliding_event_time + window_size: 5 + window_timestamp_field: test_time + functions: + - function: NUMBER_SUM + lookup_fields: [ sessions ] + + table_processor: + type: table + functions: + - function: JSON_UNROLL + lookup_fields: [ encapsulation ] + output_fields: [ new_name ] + +application: # [object] Application Configuration + env: # [object] Environment Variables + name: groot-stream-job # [string] Job Name + pipeline: + object-reuse: true # [boolean] Object Reuse, default is false + topology: # [array of object] Node List. It will be used to build the data flow for the job DAG graph. + - name: inline_source # [string] Node Name, must be unique. It will be used as the name of the corresponding Flink operator, e.g. kafka_source for the SOURCE processor type. + parallelism: 1 # [number] Operator-Level Parallelism.
+ downstream: [test_split,print_sink] + - name: test_split + tags: [http_tag,dns_tag] + downstream: [ table_processor,pre_etl_processor ] + parallelism: 1 + - name: pre_etl_processor + parallelism: 1 + downstream: [ print_sink ] + - name: table_processor + parallelism: 1 + downstream: [ print_sink ] + - name: print_sink + parallelism: 1 + + diff --git a/groot-tests/test-e2e-core/src/test/resources/test_env_parameter_inline_to_print.yaml b/groot-tests/test-e2e-core/src/test/resources/test_env_parameter_inline_to_print.yaml new file mode 100644 index 0000000..1d09282 --- /dev/null +++ b/groot-tests/test-e2e-core/src/test/resources/test_env_parameter_inline_to_print.yaml @@ -0,0 +1,47 @@ +sources: + inline_source: + type: inline + properties: + data: '[{"tcp_rtt_ms":128,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.abc.cn","http_url":"www.cabc.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.198","server_ip":"4.4.4.4","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931}]' + format: json + json.ignore.parse.errors: false + +sinks: + print_sink: + type: print + properties: + format: json + mode: log_warn + +application: + env: + name: example-inline-to-print + parallelism: 1 + execution: + runtime-mode: streaming + buffer-timeout: 10 + checkpointing: + interval: 10000 + mode: exactly_once + timeout: 1200000 + data-uri: file:///tmp/grootstream/checkpoints + max-concurrent-checkpoints: 2 + cleanup: true + min-pause: 100 + tolerable-failed-checkpoints: 5 + restart: + strategy: fixed-delay + attempts: 2 + delayBetweenAttempts: 1000 + state: + backend: rocksdb + flink: + pipeline: + max-parallelism: 5 + pipeline: + object-reuse: true + topology: + - name: inline_source + downstream: [print_sink] + - name: print_sink + downstream: [] \ No newline at end of file diff --git a/groot-tests/test-e2e-kafka/pom.xml b/groot-tests/test-e2e-kafka/pom.xml deleted file mode 100644 index 3d66b2a..0000000 --- a/groot-tests/test-e2e-kafka/pom.xml +++ /dev/null @@ -1,63 +0,0 @@ - - - 4.0.0 - - com.geedgenetworks - groot-tests - ${revision} - - - 
test-e2e-kafka - Groot : Tests : E2E : Kafka - - - - - com.geedgenetworks - test-common - ${project.version} - test-jar - test - - - - org.testcontainers - kafka - ${testcontainer.version} - test - - - - com.geedgenetworks - connector-kafka - ${project.version} - test - - - - org.xerial.snappy - snappy-java - test - - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - 9 - 9 - - - - - - - \ No newline at end of file diff --git a/groot-tests/test-e2e-kafka/src/test/java/com/geedgenetworks/test/e2e/kafka/KafkaIT.java b/groot-tests/test-e2e-kafka/src/test/java/com/geedgenetworks/test/e2e/kafka/KafkaIT.java deleted file mode 100644 index e60d34d..0000000 --- a/groot-tests/test-e2e-kafka/src/test/java/com/geedgenetworks/test/e2e/kafka/KafkaIT.java +++ /dev/null @@ -1,374 +0,0 @@ -package com.geedgenetworks.test.e2e.kafka; - -import com.geedgenetworks.formats.json.JsonSerializer; -import com.geedgenetworks.api.connector.type.StructType; -import com.geedgenetworks.api.connector.type.Types; -import com.geedgenetworks.test.common.TestResource; -import com.geedgenetworks.test.common.TestSuiteBase; -import com.geedgenetworks.test.common.container.TestContainer; -import com.geedgenetworks.test.common.container.TestContainerId; -import com.geedgenetworks.test.common.junit.DisabledOnContainer; -import com.google.common.collect.Lists; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; -import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.consumer.*; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.TopicPartition; -import org.apache.kafka.common.config.SaslConfigs; -import org.apache.kafka.common.serialization.ByteArrayDeserializer; -import org.apache.kafka.common.serialization.ByteArraySerializer; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.awaitility.Awaitility; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.TestTemplate; -import org.testcontainers.containers.Container; -import org.testcontainers.containers.KafkaContainer; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testcontainers.lifecycle.Startables;; -import org.testcontainers.shaded.org.apache.commons.lang3.RandomStringUtils; -import org.testcontainers.utility.DockerImageName; -import org.testcontainers.utility.DockerLoggerFactory; -import org.testcontainers.utility.MountableFile; - -import java.io.IOException; -import java.time.Duration; -import java.util.*; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; -import java.util.stream.Stream; - -import static org.awaitility.Awaitility.await; - -@Slf4j -@DisabledOnContainer( - value = {TestContainerId.FLINK_1_17}, - disabledReason = "Override TestSuiteBase @DisabledOnContainer") -public class KafkaIT extends TestSuiteBase implements TestResource { - - private KafkaContainer kafkaContainer; - - private static final String KAFKA_IMAGE_NAME = "confluentinc/cp-kafka:7.4.0"; - private static final String KAFKA_HOST = "kafkaCluster"; - private KafkaProducer producer; - private static final String DEFAULT_TEST_TOPIC_SOURCE = "test_topic_source"; - private static final String DEFAULT_TEST_TOPIC_CONSUME_GROUP = "test-consume-group"; - - @Override - @BeforeAll - public 
void startUp() { - kafkaContainer = new KafkaContainer(DockerImageName.parse(KAFKA_IMAGE_NAME)) - .withNetwork(NETWORK) - .withNetworkAliases(KAFKA_HOST) - .withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") - .withEnv("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1") - .withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "PLAINTEXT:SASL_PLAINTEXT,BROKER:SASL_PLAINTEXT") - .withEnv("KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL", "PLAIN") - .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_SASL_ENABLED_MECHANISMS", "PLAIN") - .withEnv("KAFKA_LISTENER_NAME_BROKER_SASL_ENABLED_MECHANISMS", "PLAIN") - // .withEnv("KAFKA_AUTHORIZER_CLASS_NAME", "kafka.security.authorizer.AclAuthorizer") - .withEnv("KAFKA_SUPER_USERS", "User:admin") - .withEnv("KAFKA_OPTS", "-Djava.security.auth.login.config=/etc/kafka/kafka_server_jaas.conf") - .withCopyFileToContainer(MountableFile.forClasspathResource("kafka_server_jaas.conf"), "/etc/kafka/kafka_server_jaas.conf") - .withCopyFileToContainer(MountableFile.forClasspathResource("kafka_client_jass_cli.properties"), "/etc/kafka/kafka_client_jass_cli.properties") - .withLogConsumer(new Slf4jLogConsumer(DockerLoggerFactory.getLogger(KAFKA_IMAGE_NAME))); - Startables.deepStart(Stream.of(kafkaContainer)).join(); - log.info("Kafka container started successfully"); - Awaitility.given() - .ignoreExceptions() - .atLeast(100, TimeUnit.MILLISECONDS) - .pollInterval(500, TimeUnit.MILLISECONDS) - .atMost(180, TimeUnit.SECONDS) - .untilAsserted(this::initKafkaProducer); - - log.info("Write 100 records to topic test_topic_source"); - generateTestData(DEFAULT_TEST_TOPIC_SOURCE,0, 100); - - - } - - @TestTemplate - public void testKafkaAsSourceConsume(TestContainer container) { - generateTestData("test_topic_json", 0, 10); - CompletableFuture.supplyAsync( - () -> { - try { - return container.executeJob("/kafka_source.yaml"); - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - String logs = container.getServerLogs(); - Assertions.assertEquals(StringUtils.countMatches(logs, "PrintSinkFunction"), 10); - }); - } - - @TestTemplate - public void testKafkaAsSourceConsumeErrorSchema(TestContainer container) { - generateTestData("test_topic_error_json", 0, 10); - CompletableFuture.supplyAsync( - () -> { - try { - Container.ExecResult execResult = container.executeJob("/kafka_source_error_schema.yaml"); - Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); - return execResult; - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - String logs = container.getServerLogs(); - Assertions.assertTrue(StringUtils.contains(logs, "NumberFormatException")); - }); - } - - @TestTemplate - public void testKafkaAsSink(TestContainer container) throws IOException, InterruptedException { - CompletableFuture.supplyAsync( - () -> { - try { - Container.ExecResult execResult = container.executeJob("/kafka_sink.yaml"); - Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); - return execResult; - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - - List data = Lists.newArrayList(); - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - 
data.addAll(getKafkaConsumerListData("test_sink_topic", null)); - Assertions.assertEquals(10, data.size()); // Check if all 10 records are consumed - }); - - } - - @TestTemplate - public void testKafkaAsSinkProducerQuota(TestContainer container) throws IOException, InterruptedException { - //Create topic with 3 partitions - executeShell("kafka-topics --create --topic SESSION-RECORD-QUOTA-TEST --bootstrap-server kafkaCluster:9092 --partitions 1 --replication-factor 1 --command-config /etc/kafka/kafka_client_jass_cli.properties"); - //Set producer quota to 2KB/s - executeShell("kafka-configs --bootstrap-server kafkaCluster:9092 --alter --add-config 'producer_byte_rate=2048' --entity-type users --entity-name admin --entity-type clients --entity-name SESSION-RECORD-QUOTA-TEST --command-config /etc/kafka/kafka_client_jass_cli.properties "); - - CompletableFuture.supplyAsync( - () -> { - try { - Container.ExecResult execResult = container.executeJob("/kafka_producer_quota.yaml"); - Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); - return execResult; - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - List data = Lists.newArrayList(); - await().atMost(600000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - data.addAll(getKafkaConsumerListData("SESSION-RECORD-QUOTA-TEST", "test-consume-group-quota"+ RandomStringUtils.randomAlphabetic(5))); - Assertions.assertTrue(StringUtils.contains(container.getServerLogs(), "TimeoutException") && data.size()>100); - }); - - } - - - - @TestTemplate - public void testKafkaAsSinkHandleErrorJsonFormat(TestContainer container) throws IOException, InterruptedException { - CompletableFuture. supplyAsync( - () -> { - try { - Container.ExecResult execResult = container.executeJob("/kafka_sink_handle_error_json_format.yaml"); - Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); - return execResult; - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - - List data = Lists.newArrayList(); - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - data.addAll(getKafkaConsumerListData("test_handle_error_json_format_topic", null)); - Assertions.assertTrue(StringUtils.contains(container.getServerLogs(), "UnsupportedOperationException")); - Assertions.assertEquals(0, data.size()); - }); - - - - } - - @TestTemplate - public void testKafkaSinkSkipErrorJsonFormat(TestContainer container) throws IOException, InterruptedException { - CompletableFuture.supplyAsync( - () -> { - try { - Container.ExecResult execResult = container.executeJob("/kafka_sink_skip_error_json_format.yaml"); - Assertions.assertEquals(0, execResult.getExitCode(), execResult.getStderr()); - return execResult; - } catch (Exception e) { - log.error("Commit task exception :" + e.getMessage()); - throw new RuntimeException(e); - } - }); - - - List data = Lists.newArrayList(); - await().atMost(60000, TimeUnit.MILLISECONDS) - .untilAsserted( - () -> { - data.addAll(getKafkaConsumerListData("test_skip_error_json_format_topic",null)); - Assertions.assertTrue(StringUtils.contains(container.getServerLogs(), "NullPointerException")); - Assertions.assertEquals(0, data.size()); - }); - } - - private void generateTestData(String topic, int start, int end) { - StructType dataType = Types.parseStructType("id: int, client_ip: string, server_ip: string, flag: string"); - JsonSerializer serializer = 
new JsonSerializer(dataType); - for (int i = start; i < end; i++) { - Map row = Map - .of("id", i, - "client_ip", "192.168.40.12", - "server_ip", "8.8.8.8" , - "flag", Boolean.FALSE.booleanValue()); - ProducerRecord record = - new ProducerRecord<>(topic, serializer.serialize(row)); - producer.send(record); - } - - } - - - @AfterAll - @Override - public void tearDown() throws Exception { - if (producer != null) { - producer.close(); - } - if (kafkaContainer != null) { - kafkaContainer.close(); - } - - } - - private void initKafkaProducer() { - Properties properties = new Properties(); - String bootstrapServers = kafkaContainer.getBootstrapServers(); - properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); - properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); - properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); - properties.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); - properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); - properties.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin\";"); - producer = new KafkaProducer<>(properties); - } - - private Properties kafkaConsumerConfig(String consumeGroup) { - Properties properties = new Properties(); - properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers()); - properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumeGroup); - properties.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); - properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); - properties.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin\";"); - properties.put( - ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, - OffsetResetStrategy.EARLIEST.toString().toLowerCase()); - properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); - properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); - return properties; - } - - private Properties kafkaByteConsumerConfig() { - Properties props = new Properties(); - props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers()); - props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consume-group"); - props.put( - ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, - OffsetResetStrategy.EARLIEST.toString().toLowerCase()); - props.setProperty( - ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, - ByteArrayDeserializer.class.getName()); - props.setProperty( - ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, - ByteArrayDeserializer.class.getName()); - return props; - } - - private Map getKafkaConsumerData(String topicName) { - Map data = new HashMap<>(); - try (KafkaConsumer consumer = new KafkaConsumer<>(kafkaConsumerConfig(DEFAULT_TEST_TOPIC_CONSUME_GROUP))) { - consumer.subscribe(Arrays.asList(topicName)); - Map offsets = - consumer.endOffsets(Arrays.asList(new TopicPartition(topicName, 0))); - Long endOffset = offsets.entrySet().iterator().next().getValue(); - Long lastProcessedOffset = -1L; - - do { - ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); - for (ConsumerRecord record : records) { - if (lastProcessedOffset < record.offset()) { - data.put(record.key(), record.value()); - } - lastProcessedOffset = record.offset(); - } - } while (lastProcessedOffset < endOffset - 1); - } - return data; - } - - private List 
getKafkaConsumerListData(String topicName, String consumeGroup) {
-        List<String> data = new ArrayList<>();
-        consumeGroup = StringUtils.isBlank(consumeGroup) ? DEFAULT_TEST_TOPIC_CONSUME_GROUP : consumeGroup;
-        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(kafkaConsumerConfig(consumeGroup))) {
-            consumer.subscribe(Arrays.asList(topicName));
-            Map<TopicPartition, Long> offsets =
-                    consumer.endOffsets(Arrays.asList(new TopicPartition(topicName, 0)));
-            Long endOffset = offsets.entrySet().iterator().next().getValue();
-            Long lastProcessedOffset = -1L;
-
-            do {
-                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
-                for (ConsumerRecord<String, String> record : records) {
-                    if (lastProcessedOffset < record.offset()) {
-                        data.add(record.value());
-                    }
-                    lastProcessedOffset = record.offset();
-                }
-            } while (lastProcessedOffset < endOffset - 1);
-        }
-        return data;
-    }
-
-    private void executeShell(String command) {
-        try {
-            Container.ExecResult result = kafkaContainer.execInContainer("/bin/sh", "-c", command);
-            log.info("Execute shell command result: {},{}", result.getStdout(), result.getStderr());
-
-        } catch (Exception e) {
-            log.error("Execute shell command error: {}", e.getMessage());
-        }
-    }
-
-}
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_client_jass_cli.properties b/groot-tests/test-e2e-kafka/src/test/resources/kafka_client_jass_cli.properties
deleted file mode 100644
index 986cdb9..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_client_jass_cli.properties
+++ /dev/null
@@ -1,3 +0,0 @@
-security.protocol=SASL_PLAINTEXT
-sasl.mechanism=PLAIN
-sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
\ No newline at end of file
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_producer_quota.yaml b/groot-tests/test-e2e-kafka/src/test/resources/kafka_producer_quota.yaml
deleted file mode 100644
index 8c2ad8d..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_producer_quota.yaml
+++ /dev/null
@@ -1,120 +0,0 @@
-sources: # [object] Define connector source
-  mock_source:
-    type: mock
-    properties:
-      mock.desc.file.path: /tmp/grootstream/config/template/mock_schema/session_record_mock_desc.json
-      rows.per.second: 10000
-
-processing_pipelines:
-  etl_processor:
-    type: projection
-    functions:
-      - function: SNOWFLAKE_ID
-        lookup_fields: ['']
-        output_fields: [log_id]
-        parameters:
-          data_center_id_num: 1
-      - function: UNIX_TIMESTAMP_CONVERTER
-        lookup_fields: [ __timestamp ]
-        output_fields: [ recv_time ]
-        parameters:
-          precision: seconds
-      - function: SNOWFLAKE_ID
-        lookup_fields: [ '' ]
-        output_fields: [ session_id ]
-        parameters:
-          data_center_id_num: 2
-      - function: EVAL
-        output_fields: [ ingestion_time ]
-        parameters:
-          value_expression: recv_time
-
-      - function: DOMAIN
-        lookup_fields: [ http_host, ssl_sni, dtls_sni, quic_sni ]
-        output_fields: [ server_domain ]
-        parameters:
-          option: FIRST_SIGNIFICANT_SUBDOMAIN
-
-      - function: ASN_LOOKUP
-        lookup_fields: [ client_ip ]
-        output_fields: [ client_asn ]
-        parameters:
-          kb_name: tsg_ip_asn
-          option: IP_TO_ASN
-
-      - function: ASN_LOOKUP
-        lookup_fields: [ server_ip ]
-        output_fields: [ server_asn ]
-        parameters:
-          kb_name: tsg_ip_asn
-          option: IP_TO_ASN
-
-      - function: GEOIP_LOOKUP
-        lookup_fields: [ client_ip ]
-        output_fields: []
-        parameters:
-          kb_name: tsg_ip_location
-          option: IP_TO_OBJECT
-          geolocation_field_mapping:
-            COUNTRY: client_country
-            PROVINCE: client_super_administrative_area
-            CITY: client_administrative_area
-
-      - function: GEOIP_LOOKUP
-        lookup_fields: [ server_ip ]
-        output_fields: []
-        parameters:
-          kb_name: tsg_ip_location
-          option: IP_TO_OBJECT
-          geolocation_field_mapping:
-            COUNTRY: server_country
-            PROVINCE: server_super_administrative_area
-            CITY: server_administrative_area
-
-
-      - function: CURRENT_UNIX_TIMESTAMP
-        output_fields: [ processing_time ]
-        parameters:
-          precision: seconds
-
-
-sinks:
-  print_sink:
-    type: print
-    properties:
-      mode: log_info
-      format: json
-
-  kafka_sink:
-    type: kafka
-    properties:
-      topic: SESSION-RECORD-QUOTA-TEST
-      kafka.bootstrap.servers: kafkaCluster:9092
-      kafka.client.id: SESSION-RECORD-QUOTA-TEST
-      kafka.linger.ms: 10
-      kafka.request.timeout.ms: 30000
-      kafka.batch.size: 262144
-      kafka.buffer.memory: 134217728
-      kafka.max.request.size: 10485760
-      kafka.security.protocol: SASL_PLAINTEXT
-      kafka.sasl.mechanism: PLAIN
-      kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
-      kafka.compression.type: snappy
-      format: json
-      json.ignore.parse.errors: false
-      log.failures.only: true
-
-application: # [object] Define job configuration
-  env:
-    name: kafka_producer_quota
-    parallelism: 1
-    shade.identifier: default
-    pipeline:
-      object-reuse: true
-  topology:
-    - name: mock_source
-      downstream: [ etl_processor ]
-    - name: etl_processor
-      downstream: [ kafka_sink ]
-    - name: kafka_sink
-      downstream: []
\ No newline at end of file
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_server_jaas.conf b/groot-tests/test-e2e-kafka/src/test/resources/kafka_server_jaas.conf
deleted file mode 100644
index cb4553f..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_server_jaas.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-KafkaServer {
-    org.apache.kafka.common.security.plain.PlainLoginModule required
-    username="admin"
-    password="admin"
-    user_admin="admin"
-    user_firewall="admin"
-    user_olap="admin";
-};
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink.yaml b/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink.yaml
deleted file mode 100644
index e12e76b..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-sources: # [object] Define connector source
-  inline_source:
-    type: inline
-    schema:
-      fields: # [array of object] Schema field projection, support read data only from specified fields.
-        - name: log_id
-          type: bigint
-        - name: recv_time
-          type: bigint
-        - name: server_fqdn
-          type: string
-        - name: server_domain
-          type: string
-        - name: client_ip
-          type: string
-        - name: server_ip
-          type: string
-        - name: server_asn
-          type: string
-        - name: decoded_as
-          type: string
-        - name: device_group
-          type: string
-        - name: device_tag
-          type: string
-    properties:
-      #
-      # [string] Event Data, it will be parsed to Map by the specified format.
-      #
-      data: '{"recv_time": 1705565615, "log_id":206211012872372224, "tcp_rtt_ms":128,"decoded_as":"HTTP", "http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931,"tcp_c2s_ip_fragments":0,"tcp_s2c_ip_fragments":0,"tcp_c2s_rtx_pkts":0,"tcp_c2s_rtx_bytes":0,"tcp_s2c_rtx_pkts":0,"tcp_s2c_rtx_bytes":0,"tcp_c2s_o3_pkts":0,"tcp_s2c_o3_pkts":0,"tcp_c2s_lost_bytes":0,"tcp_s2c_lost_bytes":0,"flags":26418,"flags_identify_info":[100,1,100,60,150,100,1,2],"app_transition":"http.1111.test_1_1","decoded_as":"HTTP","server_fqdn":"www.ct.cn","app":"test_1_1","decoded_path":"ETHERNET.IPv4.TCP.http","fqdn_category_list":[1767],"t_vsys_id":1,"vsys_id":1,"session_id":290538039798223400,"tcp_handshake_latency_ms":41,"client_os_desc":"Windows","server_os_desc":"Linux","data_center":"center-xxg-tsgx","device_group":"group-xxg-tsgx","device_tag":"{\"tags\":[{\"tag\":\"data_center\",\"value\":\"center-xxg-tsgx\"},{\"tag\":\"device_group\",\"value\":\"group-xxg-tsgx\"}]}","device_id":"9800165603247024","sled_ip":"192.168.40.39","dup_traffic_flag":0}'
-      format: json
-      interval.per.row: 1s
-      repeat.count: 10
-      json.ignore.parse.errors: false
-
-
-sinks:
-  connector_kafka:
-    type: kafka
-    properties:
-      topic: test_sink_topic
-      kafka.bootstrap.servers: kafkaCluster:9092
-      kafka.client.id: test_sink_topic
-      kafka.security.protocol: SASL_PLAINTEXT
-      kafka.sasl.mechanism: PLAIN
-      kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
-      kafka.retries: 0
-      kafka.linger.ms: 10
-      kafka.request.timeout.ms: 30000
-      kafka.batch.size: 262144
-      kafka.buffer.memory: 134217728
-      kafka.max.request.size: 10485760
-      kafka.compression.type: snappy
-      format: json
-      log.failures.only: true
-
-application: # [object] Define job configuration
-  env:
-    name: example-inline-to-kafka
-    parallelism: 1
-    shade.identifier: default
-    pipeline:
-      object-reuse: true
-  topology:
-    - name: inline_source
-      downstream: [ connector_kafka ]
-    - name: connector_kafka
-      downstream: []
\ No newline at end of file
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml b/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml
deleted file mode 100644
index d65157a..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_handle_error_json_format.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-sources: # [object] Define connector source
-  inline_source:
-    type: inline
-    schema:
-      fields: # [array of object] Schema field projection, support read data only from specified fields.
-        - name: log_id
-          type: bigint
-        - name: recv_time
-          type: bigint
-        - name: server_fqdn
-          type: string
-        - name: server_domain
-          type: string
-        - name: client_ip
-          type: string
-        - name: server_ip
-          type: string
-        - name: server_asn
-          type: string
-        - name: decoded_as
-          type: string
-        - name: device_group
-          type: string
-        - name: device_tag
-          type: bigint
-    properties:
-      #
-      # [string] Event Data, it will be parsed to Map by the specified format.
-      #
-      data: '{"recv_time": 1705565615, "log_id":206211012872372224, "tcp_rtt_ms":128,"decoded_as":"HTTP", "http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931,"tcp_c2s_ip_fragments":0,"tcp_s2c_ip_fragments":0,"tcp_c2s_rtx_pkts":0,"tcp_c2s_rtx_bytes":0,"tcp_s2c_rtx_pkts":0,"tcp_s2c_rtx_bytes":0,"tcp_c2s_o3_pkts":0,"tcp_s2c_o3_pkts":0,"tcp_c2s_lost_bytes":0,"tcp_s2c_lost_bytes":0,"flags":26418,"flags_identify_info":[100,1,100,60,150,100,1,2],"app_transition":"http.1111.test_1_1","decoded_as":"HTTP","server_fqdn":"www.ct.cn","app":"test_1_1","decoded_path":"ETHERNET.IPv4.TCP.http","fqdn_category_list":[1767],"t_vsys_id":1,"vsys_id":1,"session_id":290538039798223400,"tcp_handshake_latency_ms":41,"client_os_desc":"Windows","server_os_desc":"Linux","data_center":"center-xxg-tsgx","device_group":"group-xxg-tsgx","device_tag":"{\"tags\":[{\"tag\":\"data_center\",\"value\":\"center-xxg-tsgx\"},{\"tag\":\"device_group\",\"value\":\"group-xxg-tsgx\"}]}","device_id":"9800165603247024","sled_ip":"192.168.40.39","dup_traffic_flag":0}'
-      format: json
-      interval.per.row: 1s
-      repeat.count: 10
-      json.ignore.parse.errors: false
-
-
-sinks:
-  connector_kafka:
-    type: kafka
-    properties:
-      topic: test_handle_error_json_format_topic
-      kafka.bootstrap.servers: kafkaCluster:9092
-      kafka.security.protocol: SASL_PLAINTEXT
-      kafka.sasl.mechanism: PLAIN
-      kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
-      kafka.retries: 0
-      kafka.linger.ms: 10
-      kafka.request.timeout.ms: 30000
-      kafka.batch.size: 262144
-      kafka.buffer.memory: 134217728
-      kafka.max.request.size: 10485760
-      kafka.compression.type: snappy
-      format: json
-      log.failures.only: true
-
-application: # [object] Define job configuration
-  env:
-    name: example-inline-to-kafka
-    parallelism: 1
-    shade.identifier: default
-    pipeline:
-      object-reuse: true
-  topology:
-    - name: inline_source
-      downstream: [ connector_kafka ]
-    - name: connector_kafka
-      downstream: []
\ No newline at end of file
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml b/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml
deleted file mode 100644
index d9cb80f..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_sink_skip_error_json_format.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-sources: # [object] Define connector source
-  inline_source:
-    type: inline
-    schema:
-      fields: # [array of object] Schema field projection, support read data only from specified fields.
-        - name: log_id
-          type: bigint
-        - name: recv_time
-          type: bigint
-        - name: server_fqdn
-          type: string
-        - name: server_domain
-          type: string
-        - name: client_ip
-          type: string
-        - name: server_ip
-          type: string
-        - name: server_asn
-          type: string
-        - name: decoded_as
-          type: string
-        - name: device_group
-          type: string
-        - name: device_tag
-          type: bigint
-    properties:
-      #
-      # [string] Event Data, it will be parsed to Map by the specified format.
-      #
-      data: '{"recv_time": 1705565615, "log_id":206211012872372224, "tcp_rtt_ms":128,"decoded_as":"HTTP", "http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931,"tcp_c2s_ip_fragments":0,"tcp_s2c_ip_fragments":0,"tcp_c2s_rtx_pkts":0,"tcp_c2s_rtx_bytes":0,"tcp_s2c_rtx_pkts":0,"tcp_s2c_rtx_bytes":0,"tcp_c2s_o3_pkts":0,"tcp_s2c_o3_pkts":0,"tcp_c2s_lost_bytes":0,"tcp_s2c_lost_bytes":0,"flags":26418,"flags_identify_info":[100,1,100,60,150,100,1,2],"app_transition":"http.1111.test_1_1","decoded_as":"HTTP","server_fqdn":"www.ct.cn","app":"test_1_1","decoded_path":"ETHERNET.IPv4.TCP.http","fqdn_category_list":[1767],"t_vsys_id":1,"vsys_id":1,"session_id":290538039798223400,"tcp_handshake_latency_ms":41,"client_os_desc":"Windows","server_os_desc":"Linux","data_center":"center-xxg-tsgx","device_group":"group-xxg-tsgx","device_tag":"{\"tags\":[{\"tag\":\"data_center\",\"value\":\"center-xxg-tsgx\"},{\"tag\":\"device_group\",\"value\":\"group-xxg-tsgx\"}]}","device_id":"9800165603247024","sled_ip":"192.168.40.39","dup_traffic_flag":0}'
-      format: json
-      interval.per.row: 1s
-      repeat.count: 10
-      json.ignore.parse.errors: true
-
-
-sinks:
-  connector_kafka:
-    type: kafka
-    properties:
-      topic: test_skip_error_json_format_topic
-      kafka.bootstrap.servers: kafkaCluster:9092
-      kafka.security.protocol: SASL_PLAINTEXT
-      kafka.sasl.mechanism: PLAIN
-      kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
-      kafka.retries: 0
-      kafka.linger.ms: 10
-      kafka.request.timeout.ms: 30000
-      kafka.batch.size: 262144
-      kafka.buffer.memory: 134217728
-      kafka.max.request.size: 10485760
-      kafka.compression.type: snappy
-      format: json
-      log.failures.only: true
-
-application: # [object] Define job configuration
-  env:
-    name: example-inline-to-kafka
-    parallelism: 1
-    shade.identifier: default
-    pipeline:
-      object-reuse: true
-  topology:
-    - name: inline_source
-      downstream: [ connector_kafka ]
-    - name: connector_kafka
-      downstream: []
\ No newline at end of file
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_source.yaml b/groot-tests/test-e2e-kafka/src/test/resources/kafka_source.yaml
deleted file mode 100644
index 3403ab9..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_source.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-sources:
-  kafka_source:
-    type : kafka
-    schema:
-      fields: # [array of object] Schema field projection, support read data only from specified fields.
-        - name: client_ip
-          type: string
-        - name: server_ip
-          type: string
-    properties: # [object] Kafka source properties
-      topic: test_topic_json
-      kafka.bootstrap.servers: kafkaCluster:9092
-      kafka.session.timeout.ms: 60000
-      kafka.max.poll.records: 3000
-      kafka.max.partition.fetch.bytes: 31457280
-      kafka.security.protocol: SASL_PLAINTEXT
-      kafka.sasl.mechanism: PLAIN
-      kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
-      kafka.group.id: test_topic_json_group
-      kafka.auto.offset.reset: earliest
-      format: json
-
-sinks: # [object] Define connector sink
-  print_sink:
-    type: print
-    properties:
-      mode: log_warn
-      format: json
-
-application: # [object] Define job configuration
-  env:
-    name: example-kafka-to-print
-    parallelism: 1
-    shade.identifier: default
-    pipeline:
-      object-reuse: true
-  topology:
-    - name: kafka_source
-      downstream: [print_sink]
-    - name: print_sink
-      downstream: []
\ No newline at end of file
diff --git a/groot-tests/test-e2e-kafka/src/test/resources/kafka_source_error_schema.yaml b/groot-tests/test-e2e-kafka/src/test/resources/kafka_source_error_schema.yaml
deleted file mode 100644
index 1016560..0000000
--- a/groot-tests/test-e2e-kafka/src/test/resources/kafka_source_error_schema.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-sources:
-  kafka_source:
-    type : kafka
-    properties: # [object] Kafka source properties
-      topic: test_topic_error_json
-      kafka.bootstrap.servers: kafkaCluster:9092
-      kafka.security.protocol: SASL_PLAINTEXT
-      kafka.sasl.mechanism: PLAIN
-      kafka.sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin";
-      kafka.session.timeout.ms: 60000
-      kafka.max.poll.records: 3000
-      kafka.max.partition.fetch.bytes: 31457280
-      kafka.group.id: test_topic_error_json_group
-      kafka.auto.offset.reset: earliest
-      format: json
-
-sinks: # [object] Define connector sink
-  print_sink:
-    type: print
-    schema:
-      fields: # [array of object] Schema field projection, support read data only from specified fields.
-        - name: client_ip
-          type: string
-        - name: server_ip
-          type: bigint
-    properties:
-      mode: log_warn
-      format: json
-      json.ignore.parse.errors: false
-
-
-application: # [object] Define job configuration
-  env:
-    name: example-kafka-to-print
-    parallelism: 1
-    pipeline:
-      object-reuse: true
-  topology:
-    - name: kafka_source
-      downstream: [print_sink]
-    - name: print_sink
-      downstream: []
\ No newline at end of file
--
cgit v1.2.3