summaryrefslogtreecommitdiff
path: root/docs/connector
diff options
context:
space:
mode:
authordoufenghu <[email protected]>2024-01-24 20:48:58 +0800
committerdoufenghu <[email protected]>2024-01-24 20:48:58 +0800
commiteb055c2917289b4ce8df0935a43b0b13d87bd561 (patch)
tree44280757727685b133b985f2e57942a5d5498dc5 /docs/connector
parent85ed9c9c8a2cba8268ab7facf169fa2eae9cdea1 (diff)
[Improve][docs] Add test connectors Inline.md, Print.md and filter for Aviator.md.
Diffstat (limited to 'docs/connector')
-rw-r--r--docs/connector/sink/ClickHouse.md2
-rw-r--r--docs/connector/sink/Kafka.md6
-rw-r--r--docs/connector/sink/Print.md46
-rw-r--r--docs/connector/source/IPFIX.md31
-rw-r--r--docs/connector/source/Inline.md51
-rw-r--r--docs/connector/source/Kafka.md6
6 files changed, 135 insertions, 7 deletions
diff --git a/docs/connector/sink/ClickHouse.md b/docs/connector/sink/ClickHouse.md
index 21d5de9..79ba1db 100644
--- a/docs/connector/sink/ClickHouse.md
+++ b/docs/connector/sink/ClickHouse.md
@@ -30,7 +30,7 @@ In order to use the ClickHouse connector, the following dependencies are require
| ClickHouse | Universal | [Download](http://192.168.40.153:8099/service/local/repositories/platform-release/content/com/geedgenetworks/connector-clickhouse/) |
-ClickHouse sink customizes properties. if properties belongs to ClickHouse JDBC Config, you can use `connection.` prefix to set.
+ClickHouse sink custom properties. If a property belongs to the ClickHouse JDBC Config, you can use the `connection.` prefix to set it.
| Name | Type | Required | Default | Description |
|-----------------------|----------|----------|---------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
diff --git a/docs/connector/sink/Kafka.md b/docs/connector/sink/Kafka.md
index 4073912..04dae2f 100644
--- a/docs/connector/sink/Kafka.md
+++ b/docs/connector/sink/Kafka.md
@@ -9,7 +9,7 @@ In order to use the Kafka connector, the following dependencies are required. Th
|------------|--------------------|--------------------------------------------------------------------------------------------------------------------------------|
| Kafka | Universal | [Download](http://192.168.40.153:8099/service/local/repositories/platform-release/content/com/geedgenetworks/connector-kafka/) |
-Kafka sink customizes properties. if properties belongs to Kafka Producer Config, you can use `kafka.` prefix to set.
+Kafka sink custom properties. If a property belongs to the Kafka Producer Config, you can use the `kafka.` prefix to set it.
| Name | Type | Required | Default | Description |
|-------------------------|--------|----------|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|
@@ -54,8 +54,8 @@ sources: # [object] Define connector source
format: json
json.ignore.parse.errors: false
-sinks:
- connector_kafka:
+sinks: # [object] Define connector sink
+ connector_kafka: # [object] Kafka sink connector name
type: kafka
properties:
topic: SESSION-RECORD-TEST
diff --git a/docs/connector/sink/Print.md b/docs/connector/sink/Print.md
new file mode 100644
index 0000000..271d7a2
--- /dev/null
+++ b/docs/connector/sink/Print.md
@@ -0,0 +1,46 @@
+# Print
+> Print sink connector
+## Description
+Print sink connector is used to print data to the console. It is useful for testing.
+
+## Sink Options
+Print sink custom properties.
+
+| Name | Type | Required | Default | Description |
+|-----------------|----------|----------|---------|-----------------------------------------------------------------------------------|
+| format | String | Yes | - | Data format. The Optional values are `json`, `protobuf`. |
+| [format].config | Map | No | - | Data format properties. Please refer to [Format Options](../formats) for details. |
+| mode | String | No | stdout | Print mode. The Optional values are `stdout`, `log_info`, `log_warn`, `null`. |
+
+## Example
+This example reads data from the inline test source and prints it to the console.
+```yaml
+sources:
+ inline_source:
+ type: inline
+ properties:
+ data: '[{"tcp_rtt_ms":128,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.abc.cn","http_url":"www.cabc.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.198","server_ip":"4.4.4.4","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931}]'
+ format: json
+ json.ignore.parse.errors: false
+ interval.per.row: 5s
+ repeat.count: 10
+
+sinks:
+ print_sink: # [object] Print sink connector name
+ type: print
+ properties:
+ format: json
+ mode: log_warn
+
+application:
+ env:
+ name: example-inline-to-print
+ parallelism: 3
+ pipeline:
+ object-reuse: true
+ topology:
+ - name: inline_source
+ downstream: [print_sink]
+ - name: print_sink
+ downstream: []
+``` \ No newline at end of file
diff --git a/docs/connector/source/IPFIX.md b/docs/connector/source/IPFIX.md
index e69de29..550a5ab 100644
--- a/docs/connector/source/IPFIX.md
+++ b/docs/connector/source/IPFIX.md
@@ -0,0 +1,31 @@
+# IPFIX
+> IPFIX source collector
+## Description
+IPFIX source collector is used to collect IPFIX data from network devices. It supports the UDP protocol.
+## Source Options
+In order to use the IPFIX connector, the following dependencies are required. They can be downloaded from the Nexus Maven Repository.
+
+| Datasource | Supported Versions | Maven |
+|------------|--------------------|------------------------------------------------------------------------------------------------------------------------------------------|
+| IPFIX | Universal | [Download](http://192.168.40.153:8099/service/local/repositories/platform-release/content/com/geedgenetworks/connector-ipfix-collector/) |
+
+IPFIX source custom properties.
+
+| Name | Type | Required | Default | Description |
+|-----------------------------------------|---------|----------|-----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| .port.range | Integer | Yes | - | UDP port range |
+| buffer.size | Integer | No | 65535 | The maximum size of packet for UDP |
+| receive.buffer.size | Integer | No | 104857600 | UDP receive buffer size in bytes |
+| service.discovery.registry.mode | String | No | - | Service discovery registry mode, support `0(nacos)` and `1(consul)` |
+| service.discovery.service.name | String | No | - | Service discovery service name |
+| service.discovery.health.check.interval | Integer | No | - | Service discovery health check interval in milliseconds |
+| service.discovery.nacos.server.addr | String | No | - | A list of host/port pairs to use for establishing the initial connection to the Nacos cluster. This list should be in the form `host1:port1,host2:port2,...`. |
+| service.discovery.nacos.username        | String  | No       | -         | The username to use to connect to Nacos. Requires `ipfix.service.discovery.registry.mode` set to `0(nacos)`                                                     |
+| service.discovery.nacos.password        | String  | No       | -         | The password to use to connect to Nacos. Requires `ipfix.service.discovery.registry.mode` set to `0(nacos)`                                                     |
+| service.discovery.nacos.namespace       | String  | No       | -         | The namespace to use to connect to Nacos. Requires `ipfix.service.discovery.registry.mode` set to `0(nacos)`                                                    |
+| service.discovery.nacos.group           | String  | No       | -         | The group to use to connect to Nacos. Requires `ipfix.service.discovery.registry.mode` set to `0(nacos)`                                                        |
+| service.discovery.consul.server.addr | String | No | - | Consul server address. Need specify `ipfix.service.discovery.registry.mode` to `1(consul)` |
+| service.discovery.consul.server.port | Integer | No | - | Consul server port. Need specify `ipfix.service.discovery.registry.mode` to `1(consul)` |
+| service.discovery.consul.token | String | No | - | Consul token. Need specify `ipfix.service.discovery.registry.mode` to `1(consul)` |
+
+## Example \ No newline at end of file
diff --git a/docs/connector/source/Inline.md b/docs/connector/source/Inline.md
new file mode 100644
index 0000000..c91d1a7
--- /dev/null
+++ b/docs/connector/source/Inline.md
@@ -0,0 +1,51 @@
+# Inline
+> Inline source connector
+## Description
+Inline source connector is used to generate data from an inline configuration. It is useful for testing.
+## Source Options
+Inline source custom properties. The data will be parsed to Map<String, Object> by the specified format.
+
+| Name | Type | Required | Default | Description |
+|------------------|-----------|----------|---------|--------------------------------------------------------------------------------------------------------------------------|
+| data             | String    | Yes      | -       | Testing data; supports JSON and Protobuf formats. If the data is in JSON format, it can be parsed into multiple events via a JSON array.  |
+| format | String | Yes | - | Data format. The Optional values are `json`, `protobuf`. |
+| [format].config | Map | No | - | Data format properties. Please refer to [Format Options](../formats) for details. |
+| interval.per.row | Duration | No | 1s | Interval time in seconds between each row. If it is 0, it will be generated as fast as possible. |
+| repeat.count | Integer | No | -1 | Repeat count. If it is -1, it will be generated infinitely. |
+| type | String | No | string | Data codec type. The Optional values are `string(UTF-8)`, `hex`, `base64`. |
+
+## Example
+
+This example reads data from the inline test source and prints it to the console.
+
+```yaml
+sources:
+ inline_source: # [object] Inline source connector name
+ type: inline
+ properties:
+ data: '[{"tcp_rtt_ms":128,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.ct.cn","http_url":"www.ct.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.11.22.22","server_ip":"8.8.8.8","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":5575,"sent_pkts":97,"sent_bytes":5892,"received_pkts":250,"received_bytes":333931},{"tcp_rtt_ms":256,"decoded_as":"HTTP","http_version":"http1","http_request_line":"GET / HTTP/1.1","http_host":"www.abc.cn","http_url":"www.cabc.cn/","http_user_agent":"curl/8.0.1","http_status_code":200,"http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html; charset=UTF-8","http_response_latency_ms":31,"http_session_duration_ms":5451,"in_src_mac":"ba:bb:a7:3c:67:1c","in_dest_mac":"86:dd:7a:8f:ae:e2","out_src_mac":"86:dd:7a:8f:ae:e2","out_dest_mac":"ba:bb:a7:3c:67:1c","tcp_client_isn":678677906,"tcp_server_isn":1006700307,"address_type":4,"client_ip":"192.168.10.198","server_ip":"4.4.4.4","client_port":42751,"server_port":80,"in_link_id":65535,"out_link_id":65535,"start_timestamp_ms":1703646546127,"end_timestamp_ms":1703646551702,"duration_ms":2575,"sent_pkts":197,"sent_bytes":5892,"received_pkts":350,"received_bytes":533931}]'
+ format: json
+ json.ignore.parse.errors: false
+ interval.per.row: 5s
+ repeat.count: 10
+
+sinks:
+ print_sink:
+ type: print
+ properties:
+ format: json
+
+application:
+ env:
+ name: example-inline-to-print
+ parallelism: 3
+ pipeline:
+ object-reuse: true
+ topology:
+ - name: inline_source
+ downstream: [print_sink]
+ - name: print_sink
+ downstream: []
+```
+
+
diff --git a/docs/connector/source/Kafka.md b/docs/connector/source/Kafka.md
index 4d9b34d..0565fd4 100644
--- a/docs/connector/source/Kafka.md
+++ b/docs/connector/source/Kafka.md
@@ -9,7 +9,7 @@ In order to use the Kafka connector, the following dependencies are required. Th
|------------|--------------------|--------------------------------------------------------------------------------------------------------------------------------|
| Kafka | Universal | [Download](http://192.168.40.153:8099/service/local/repositories/platform-release/content/com/geedgenetworks/connector-kafka/) |
-Kafka source customizes properties. if properties belongs to Kafka Consumer Config, you can use `kafka.` prefix to set.
+Kafka source custom properties. If a property belongs to the Kafka Consumer Config, you can use the `kafka.` prefix to set it.
| Name | Type | Required | Default | Description |
|-------------------------|--------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|
@@ -22,8 +22,8 @@ Kafka source customizes properties. if properties belongs to Kafka Consumer Conf
## Example
This example reads data from the Kafka topic `SESSION-RECORD` and prints it to the console.
```yaml
-sources:
- kafka_source:
+sources: # [object] Define connector source
+ kafka_source: # [object] Kafka source connector name
type : kafka
fields: # [array of object] Schema field projection, support read data only from specified fields.
- name: client_ip