summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorfumingwei <[email protected]>2023-09-14 10:03:52 +0800
committerfumingwei <[email protected]>2023-09-19 15:47:45 +0800
commit4fd06167f2f7a513d646c7698949430ff703ae82 (patch)
tree6f25eb56a265bd43434564e8d9e56e21ebcfcc03
parent54f0acdc824bdb9ca282d2d64f953c519539f975 (diff)
feature:新增python exporter
-rw-r--r--src/exporter/local_exporter.py349
-rw-r--r--src/exporter/prometheus_exporter.py245
-rw-r--r--test/CMakeLists.txt3
-rw-r--r--test/test_exporter_python.cpp159
-rw-r--r--test/test_prometheus_exporter.py100
5 files changed, 855 insertions, 1 deletions
diff --git a/src/exporter/local_exporter.py b/src/exporter/local_exporter.py
new file mode 100644
index 0000000..7226082
--- /dev/null
+++ b/src/exporter/local_exporter.py
@@ -0,0 +1,349 @@
+#!/usr/bin/python3
+
+import argparse
+import sys
+import json
+import re
+import os
+import logging
+import shutil
+import datetime
+import time
+from prettytable import PrettyTable,NONE,HEADER
+
+import ctypes
+
# ctypes bindings for the native fieldstat histogram/HLL helpers.
libfieldstat = ctypes.CDLL('libfieldstat4.so')

# histogram_base64_decode(bytes) -> opaque handle; release with histogram_free.
libfieldstat.histogram_base64_decode.argtypes = [ctypes.c_char_p]
libfieldstat.histogram_base64_decode.restype = ctypes.c_void_p

libfieldstat.histogram_free.argtypes = [ctypes.c_void_p]

libfieldstat.histogram_value_at_percentile.argtypes = [ctypes.c_void_p, ctypes.c_double]
libfieldstat.histogram_value_at_percentile.restype = ctypes.c_longlong

libfieldstat.histogram_count_le_value.argtypes = [ctypes.c_void_p, ctypes.c_longlong]
libfieldstat.histogram_count_le_value.restype = ctypes.c_longlong

libfieldstat.histogram_value_total_count.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_total_count.restype = ctypes.c_longlong

libfieldstat.histogram_value_min.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_min.restype = ctypes.c_longlong

libfieldstat.histogram_value_max.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_max.restype = ctypes.c_longlong

libfieldstat.histogram_value_mean.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_mean.restype = ctypes.c_double

libfieldstat.histogram_value_stddev.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_stddev.restype = ctypes.c_double

libfieldstat.hll_base64_to_count.argtypes = [ctypes.c_char_p]
libfieldstat.hll_base64_to_count.restype = ctypes.c_double

# is_hll() is called by JsonDataParser but had no explicit prototype; declare
# it for consistency with the other bindings.  restype assumed to be an
# int/bool flag -- TODO confirm against the libfieldstat header.
libfieldstat.is_hll.argtypes = [ctypes.c_char_p]
libfieldstat.is_hll.restype = ctypes.c_int
+
+logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')
+
+'''
+[
+ {
+ "name": "-",
+ "tags": {
+ "send_log": "sum",
+ "policy_id": 1,
+ "quanlity": 0.500000
+ },
+ "fields": {
+ "T_success_log": 1
+ },
+ "timestamp": 1694657637836
+ },
+ {
+ "name": "-",
+ "tags": {
+ "send_log": "SECURITY-EVENT",
+ "policy_id": 1,
+ "quanlity": 0.500000
+ },
+ "fields": {
+ "T_success_log": 1
+ },
+ "timestamp": 1694657637836
+ }
+]
+'''
+
class LocalExporterVars:
    """Process-wide settings shared between the CLI layer and the tables."""

    terminal_size = 1   # columns available for rendering, refreshed each loop
    json_path = ""      # path of the fieldstat JSON dump to read
    hist_format = ""    # "summary" or "histogram"
    hist_bins = []      # parsed numeric bins for histogram output
+
class CounterTable:
    """Render counter metrics as a set of PrettyTables sized to the terminal.

    Columns are accumulated with build_table_column() and then split into as
    many tables as needed so that each table fits the terminal width.
    """

    INFO_COLUMN_WIDTH = 11  # len("speed/s") + 2 * " " + 2 * "|"
    COLUMM_PADDING = 3      # 2 * " " + "|" (historic spelling kept: public attr)

    def __init__(self):
        # Width left for data columns after the fixed info column.
        self.column_size = LocalExporterVars.terminal_size - self.INFO_COLUMN_WIDTH
        self.info_column = ("", ["tags", "sum", "speed/s"])
        self.min_width = self.INFO_COLUMN_WIDTH
        self.tables = []
        self.columns = []

    def build_table_column(self, tags, key, value):
        """Queue one data column (header=key, rows=[tags, value, speed])."""
        self.columns.append((key, [tags, str(value), ""]))
        # Track the widest cell so every table uses a uniform column width.
        self.min_width = max(self.min_width, len(tags), len(key), len(str(value)))

    def __build_one_table(self, columns_slice):
        """Build a single PrettyTable from the info column plus a slice."""
        table = PrettyTable()
        table.vrules = NONE
        table.hrules = NONE
        table.add_column(self.info_column[0], self.info_column[1], align="l")
        for header, rows in columns_slice:
            table.add_column(header, rows, align="r")
            # Force a uniform minimum width per data column.
            table.min_width[header] = self.min_width
        return table

    def __build_tables(self):
        # How many data columns fit on one screen (at least one).
        per_table = max(1, self.column_size // (self.min_width + self.COLUMM_PADDING))
        # Slicing past the end is safe in Python, so the original's special
        # case for the last chunk is unnecessary.
        for start in range(0, len(self.columns), per_table):
            chunk = self.columns[start:start + per_table]
            self.tables.append(self.__build_one_table(chunk))

    def print_tables(self):
        """Build the tables and print them, one per terminal-width group."""
        self.__build_tables()
        for table in self.tables:
            print(table)
+
+
class HistogramTable:
    """Render histogram metrics, one PrettyTable per histogram field."""

    def __init__(self):
        self.format = LocalExporterVars.hist_format  # "summary" or "histogram"
        self.bins = LocalExporterVars.hist_bins
        self.tables = []

    def __build_summary_format(self, c_hist, table):
        # One column per quantile: the value at that percentile.
        for quantile in LocalExporterVars.hist_bins:
            header = "{:.2f}%".format(quantile * 100)
            value = libfieldstat.histogram_value_at_percentile(c_hist, float(quantile * 100))
            table.add_column(header, [value])

    def __build_histogram_format(self, c_hist, table):
        # One column per bucket bound: count of samples <= bound.
        # Fixes two defects in the original:
        #  * "le={:d}" raised ValueError because bins are parsed as floats;
        #  * histogram_value_at_percentile() was called where the "le" header
        #    promises a count -- histogram_count_le_value() is the matching
        #    call (the prometheus exporter uses it for the same purpose).
        for bound in LocalExporterVars.hist_bins:
            header = "le={:g}".format(bound)
            # count_le_value takes a long long; fractional bounds truncate.
            count = libfieldstat.histogram_count_le_value(c_hist, int(bound))
            table.add_column(header, [count])

    def build_table(self, tags, key, value):
        """Decode the base64 histogram *value* and append a table for it."""
        table = PrettyTable()
        c_hist = libfieldstat.histogram_base64_decode(value.encode('utf-8'))
        if self.format == "summary":
            self.__build_summary_format(c_hist, table)
        if self.format == "histogram":
            self.__build_histogram_format(c_hist, table)

        table.add_column("MAX", [libfieldstat.histogram_value_max(c_hist)])
        table.add_column("MIN", [libfieldstat.histogram_value_min(c_hist)])
        table.add_column("AVG", ["{:.2f}".format(libfieldstat.histogram_value_mean(c_hist))])
        table.add_column("STDDEV", ["{:.2f}".format(libfieldstat.histogram_value_stddev(c_hist))])
        table.add_column("CNT", [libfieldstat.histogram_value_total_count(c_hist)])

        # Release the native handle before keeping only the rendered table.
        libfieldstat.histogram_free(c_hist)

        table.title = key + " " + tags
        self.tables.append(table)

    def print_tables(self):
        for table in self.tables:
            print(table)
+
class JsonDataParser:
    """Read the fieldstat JSON dump and dispatch each field to a table."""

    def __init__(self):
        self.json_path = LocalExporterVars.json_path
        self.ctable = CounterTable()
        self.htable = HistogramTable()
        self.hlltable = CounterTable()

    def __dealwith_counter(self, tags, key, value):
        self.ctable.build_table_column(tags, key, value)

    def __dealwith_histogram(self, tags, key, value):
        self.htable.build_table(tags, key, value)

    def __dealwith_hll(self, tags, key, value):
        estimate = libfieldstat.hll_base64_to_count(value.encode('utf-8'))
        self.hlltable.build_table_column(tags, key, "{:.2f}".format(estimate))

    def __parse_json_tags(self, json_object):
        # Fold the metric name into the tag set so it is shown with the tags.
        tags = json_object["tags"]
        tags.update({"app_name": json_object["name"]})
        return json.dumps(tags)

    def __parse_json_object(self, json_object):
        tags = self.__parse_json_tags(json_object)
        # int fields are plain counters; everything else is a base64 blob
        # that is either an HLL sketch or a histogram.
        for key, value in json_object["fields"].items():
            if isinstance(value, int):
                self.__dealwith_counter(tags, key, value)
            elif libfieldstat.is_hll(value.encode('utf-8')):
                self.__dealwith_hll(tags, key, value)
            else:
                self.__dealwith_histogram(tags, key, value)

    def parse_data(self):
        """Load the JSON file and feed every object into the tables."""
        if not os.path.exists(self.json_path):
            logging.error("Path: {%s} does not exist", self.json_path)
            return
        with open(self.json_path) as file:
            for json_object in json.load(file):
                self.__parse_json_object(json_object)

    def __print_top_edge(self):
        # Banner line: current wall-clock time centred between '=' runs.
        now = datetime.datetime.now().timestamp()
        stamp = datetime.datetime.fromtimestamp(now).strftime('%a %b %d %H:%M:%S %Y')
        pad = (LocalExporterVars.terminal_size - len(stamp)) // 2
        print('=' * pad + stamp + '=' * pad)

    def __print_bottom_edge(self):
        print('-' * LocalExporterVars.terminal_size)

    def print_data(self):
        """Print all sections: counters, histograms, HLL estimates."""
        self.__print_top_edge()
        self.ctable.print_tables()
        print("\n")
        self.htable.print_tables()
        print("\n")
        self.hlltable.print_tables()
        self.__print_bottom_edge()

    @classmethod
    def run_json_data_parser(cls):
        parser = cls()
        parser.parse_data()
        parser.print_data()
+
class LocalExporter:
    """CLI front end: parse options, then print the tables once or looped."""

    DEFAULT_HIST_BINS = [0.1, 0.5, 0.8, 0.9, 0.95, 0.99]
    DEFAULT_HIST_FORMAT = "summary"
    DEFAULT_JSON_PATH = "./fieldstat.json"
    DEFAULT_INTERVAL_S = 1

    def __init__(self):
        self.is_loop = False
        self.interval_s = self.DEFAULT_INTERVAL_S

    def __verify_cmd_args(self, args):
        """Return 0 if the CLI options are coherent, -1 otherwise."""
        if args.hist_format not in ["summary", "histogram"]:
            logging.error("Histogram format must be 'summary' or 'histogram'")
            return -1

        bins = args.hist_bins.split(',')

        if args.hist_format == "summary":
            # Quantile mode: every bin must look like 0.01 .. 0.99.
            for bin_str in bins:
                if not re.match(r'^0\.([1-9]|[0][1-9]|[1-9]\d)$', bin_str):
                    logging.error("When histogram format is %s, bins value in [0.01-0.99], "
                                  "bins format example: 0.1,0.2,0.3", args.hist_format)
                    return -1
        if args.hist_format == "histogram":
            # Bucket mode: every bin must be a non-negative integer or decimal.
            for bin_str in bins:
                if not re.match(r'^\d+(\.\d+)?$', bin_str):
                    logging.error("When histogram format is %s, bins value is integer or decimal, "
                                  "bins format example: 0.1,1,10,100,1000", args.hist_format)
                    return -1
        return 0

    def __parse_bins_str(self, bins_str):
        """Parse "a,b,c" into a sorted list of unique floats.

        BUG FIX: the original returned list(set(...)), whose order is
        arbitrary, so the output columns appeared in random order.
        """
        return sorted({float(item) for item in bins_str.split(',')})

    def read_cmd_options(self):
        """Parse argv, validate, and publish settings to LocalExporterVars."""
        bins_str = ','.join(str(x) for x in self.DEFAULT_HIST_BINS)

        parser = argparse.ArgumentParser(description='Fieldstat local exporter')

        parser.add_argument("-b", "--hist-bins", type=str, default=bins_str,
                            help="The metrics of histogram type output bins.")
        parser.add_argument("-f", "--hist-format", type=str, default=self.DEFAULT_HIST_FORMAT,
                            help="The metrics of histogram type output format.")
        parser.add_argument("-j", "--json-path", type=str, default=self.DEFAULT_JSON_PATH,
                            help="The input fieldstat metrics json file path.")
        parser.add_argument("-i", "--interval", type=int, default=self.DEFAULT_INTERVAL_S,
                            help="interval, seconds to wait between print.")
        parser.add_argument("-l", "--loops", action='store_true', default=False,
                            help="print loop, exit when recv a signal.")

        args = parser.parse_args()

        if -1 == self.__verify_cmd_args(args):
            parser.print_help()
            sys.exit(1)

        LocalExporterVars.hist_format = args.hist_format
        LocalExporterVars.json_path = args.json_path
        LocalExporterVars.hist_bins = self.__parse_bins_str(args.hist_bins)

        self.interval_s = args.interval
        self.is_loop = args.loops

    def local_export(self):
        """Print the tables once, or every interval_s seconds when looping."""
        try:
            while True:
                # Re-read the terminal size each pass so resizes take effect.
                LocalExporterVars.terminal_size, _ = shutil.get_terminal_size((128, 64))
                JsonDataParser.run_json_data_parser()
                if not self.is_loop:
                    break
                time.sleep(self.interval_s)
        except KeyboardInterrupt:
            pass  # Ctrl-C is the normal way to stop the loop.

    @classmethod
    def run_local_exporter(cls):
        exporter = cls()
        exporter.read_cmd_options()
        exporter.local_export()
+
+
+
# Script entry point: parse CLI options and run the local exporter.
if __name__ == '__main__':
    LocalExporter.run_local_exporter()
diff --git a/src/exporter/prometheus_exporter.py b/src/exporter/prometheus_exporter.py
new file mode 100644
index 0000000..66a57a8
--- /dev/null
+++ b/src/exporter/prometheus_exporter.py
@@ -0,0 +1,245 @@
+#!/usr/bin/python3
+
+import argparse
+import sys
+import json
+import re
+import os
+from http.server import HTTPServer, BaseHTTPRequestHandler
+
+import ctypes
+
# ctypes bindings for the native fieldstat histogram/HLL helpers.
libfieldstat = ctypes.CDLL('libfieldstat4.so')

# histogram_base64_decode(bytes) -> opaque handle; release with histogram_free.
libfieldstat.histogram_base64_decode.argtypes = [ctypes.c_char_p]
libfieldstat.histogram_base64_decode.restype = ctypes.c_void_p

libfieldstat.histogram_free.argtypes = [ctypes.c_void_p]

libfieldstat.histogram_value_at_percentile.argtypes = [ctypes.c_void_p, ctypes.c_double]
libfieldstat.histogram_value_at_percentile.restype = ctypes.c_longlong

libfieldstat.histogram_count_le_value.argtypes = [ctypes.c_void_p, ctypes.c_longlong]
libfieldstat.histogram_count_le_value.restype = ctypes.c_longlong

libfieldstat.histogram_value_total_count.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_total_count.restype = ctypes.c_longlong

libfieldstat.histogram_value_sum.argtypes = [ctypes.c_void_p]
libfieldstat.histogram_value_sum.restype = ctypes.c_longlong

libfieldstat.hll_base64_to_count.argtypes = [ctypes.c_char_p]
libfieldstat.hll_base64_to_count.restype = ctypes.c_double

# is_hll() is called by BuildPrometheusMetrics but had no explicit prototype;
# declare it for consistency with the other bindings.  restype assumed to be
# an int/bool flag -- TODO confirm against the libfieldstat header.
libfieldstat.is_hll.argtypes = [ctypes.c_char_p]
libfieldstat.is_hll.restype = ctypes.c_int
+
+import logging
+logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')
+
class PrometheusExporterVars:
    """Settings shared between the CLI layer and the HTTP request handler."""

    hist_bins = []    # parsed numeric bins for histogram/summary output
    hist_format = ""  # "summary" or "histogram"
    json_path = ""    # path of the fieldstat JSON dump to read
    uri_path = ""     # endpoint path that serves the metrics
+
class BuildPrometheusMetrics:
    """Translate the fieldstat JSON dump into Prometheus exposition text."""

    def __init__(self):
        self.hist_bins = PrometheusExporterVars.hist_bins
        self.hist_format = PrometheusExporterVars.hist_format
        self.json_path = PrometheusExporterVars.json_path

    def __escape_metric_name(self, metric_name):
        # Metric names must match [a-zA-Z_:][a-zA-Z0-9_:]*; squash the rest.
        return re.sub(r'[^a-zA-Z0-9_:]', '_', metric_name)

    def __escape_label_value(self, value):
        # Exposition format requires backslash, double-quote and newline to
        # be escaped inside a quoted label value.
        return str(value).replace('\\', r'\\').replace('"', r'\"').replace('\n', r'\n')

    def __escape_metric_tags(self, json_dict):
        """Return the comma-joined label pairs (no surrounding braces)."""
        # Label names must match [a-zA-Z_][a-zA-Z0-9_]*.  BUG FIX: the
        # original kept ':' in label names, which is only legal in metric
        # names (its own comment stated the correct regex).
        escaped_tags = []
        tags_dict = json_dict["tags"]
        tags_dict["app_name"] = json_dict["name"]

        for key, value in tags_dict.items():
            escaped_key = re.sub(r'[^a-zA-Z0-9_]', '_', key)
            escaped_tags.append(f'{escaped_key}="{self.__escape_label_value(value)}"')

        return ','.join(escaped_tags)

    def __build_type_counter(self, name, tags, value):
        """One sample line for a plain integer counter."""
        return self.__escape_metric_name(name) + "{" + tags + "}" + ' ' + str(value) + '\n'

    def __build_histogram_format(self, name, tags, c_hist):
        # Cumulative buckets: count of samples <= each bound, plus the
        # mandatory "+Inf" bucket (== total count) the original omitted.
        metrics = ""
        for bound in self.hist_bins:
            count = libfieldstat.histogram_count_le_value(c_hist, int(bound))
            metrics += name + "_bucket" + "{" + tags + ",le=\"{:.2f}\"".format(bound) + "}" + ' ' + str(count) + '\n'
        total = libfieldstat.histogram_value_total_count(c_hist)
        metrics += name + "_bucket" + "{" + tags + ",le=\"+Inf\"}" + ' ' + str(total) + '\n'
        return metrics

    def __build_summary_format(self, name, tags, c_hist):
        # Quantile series.  BUG FIX: Prometheus expects quantile="0.50"
        # (a 0-1 value); the original emitted quantile="50.00%", which
        # clients cannot parse as a number.
        metrics = ""
        for quantile in self.hist_bins:
            value = libfieldstat.histogram_value_at_percentile(c_hist, float(quantile * 100))
            metrics += name + "{" + tags + ",quantile=\"{:.2f}\"".format(quantile) + "}" + ' ' + str(value) + '\n'
        return metrics

    def __build_type_histogram(self, name, tags, value):
        """Decode a base64 histogram and emit summary or histogram series."""
        escaped_name = self.__escape_metric_name(name)
        metrics = ""

        c_hist = libfieldstat.histogram_base64_decode(value.encode('utf-8'))

        if self.hist_format == "summary":
            metrics += self.__build_summary_format(escaped_name, tags, c_hist)
        if self.hist_format == "histogram":
            metrics += self.__build_histogram_format(escaped_name, tags, c_hist)

        sum_value = libfieldstat.histogram_value_sum(c_hist)
        metrics += escaped_name + "_sum" + "{" + tags + "}" + ' ' + str(sum_value) + '\n'

        cnt_value = libfieldstat.histogram_value_total_count(c_hist)
        metrics += escaped_name + "_count" + "{" + tags + "}" + ' ' + str(cnt_value) + '\n'

        # Release the native handle once the text has been built.
        libfieldstat.histogram_free(c_hist)

        return metrics

    def __build_type_hll(self, name, tags, value):
        """One sample line for an HLL cardinality estimate."""
        hll_value = libfieldstat.hll_base64_to_count(value.encode('utf-8'))
        # Escape the name here too -- the original escaped counter names but
        # not HLL names.
        return self.__escape_metric_name(name) + "{" + tags + "}" + ' ' + "{:.2f}".format(hll_value) + '\n'

    def __build_metrics(self, json_dict):
        """Emit all series for one JSON object."""
        metrics = ""
        metric_tags = self.__escape_metric_tags(json_dict)
        # int fields are plain counters; everything else is a base64 blob
        # that is either an HLL sketch or a histogram.
        for key, value in json_dict["fields"].items():
            if isinstance(value, int):
                metrics += self.__build_type_counter(key, metric_tags, value)
            elif libfieldstat.is_hll(value.encode('utf-8')):
                metrics += self.__build_type_hll(key, metric_tags, value)
            else:
                metrics += self.__build_type_histogram(key, metric_tags, value)
        return metrics

    def build_metrics_payload(self):
        """Return the full exposition payload, or "" if the dump is missing."""
        payload = ""

        if not os.path.exists(self.json_path):
            logging.error("Path: {%s} does not exist", self.json_path)
            return payload

        with open(self.json_path) as file:
            for item in json.load(file):
                payload += self.__build_metrics(item)

        return payload

    @classmethod
    def run_build_metrics(cls):
        builder = cls()
        return builder.build_metrics_payload()
+
+
class PrometheusEndpoint(BaseHTTPRequestHandler):
    """Serve the metrics payload on the configured URI path."""

    def __init__(self, request, client_address, server):
        # Snapshot the path before the base class dispatches the request.
        self.desired_path = PrometheusExporterVars.uri_path
        super().__init__(request, client_address, server)

    def do_GET(self):
        """Answer GET <uri_path> with the metrics; anything else is a 404."""
        if self.path != self.desired_path:
            # BUG FIX: send_error() writes the status line, headers and an
            # error body itself; the original's extra end_headers() call
            # pushed a stray blank line into the response stream.
            self.send_error(404)
            return

        payload = BuildPrometheusMetrics.run_build_metrics()
        self.send_response(200)
        self.send_header('Content-type', 'text/plain; version=0.0.4')
        self.end_headers()
        self.wfile.write(payload.encode())
+
class PrometheusExporter:
    """CLI front end: parse options and serve the Prometheus HTTP endpoint."""

    DEFAULT_LISTEN_PORT = 8080
    DEFAULT_HIST_BINS = [0.1, 0.5, 0.8, 0.9, 0.95, 0.99]
    DEFAULT_HIST_FORMAT = "summary"
    DEFAULT_JSON_PATH = "./fieldstat.json"
    DEFAULT_URI_PATH = "/metrics"

    def __init__(self):
        self.listen_port = self.DEFAULT_LISTEN_PORT

    def __verify_cmd_args(self, args):
        """Return 0 if the CLI options are coherent, -1 otherwise."""
        if args.hist_format not in ["summary", "histogram"]:
            logging.error("Histogram format must be 'summary' or 'histogram'")
            return -1

        bins = args.hist_bins.split(',')

        if args.hist_format == "summary":
            # Quantile mode: every bin must look like 0.01 .. 0.99.
            for bin_str in bins:
                if not re.match(r'^0\.([1-9]|[0][1-9]|[1-9]\d)$', bin_str):
                    logging.error("When histogram format is %s, bins value in [0.01-0.99], "
                                  "bins format example: 0.1,0.2,0.3", args.hist_format)
                    return -1
        if args.hist_format == "histogram":
            # Bucket mode: every bin must be a non-negative integer or decimal.
            for bin_str in bins:
                if not re.match(r'^\d+(\.\d+)?$', bin_str):
                    logging.error("When histogram format is %s, bins value is integer or decimal, "
                                  "bins format example: 0.1,1,10,100,1000", args.hist_format)
                    return -1
        return 0

    def __parse_bins_str(self, bins_str):
        """Parse "a,b,c" into a sorted list of unique floats.

        BUG FIX: the original built the sorted, de-duplicated list but then
        returned the raw (unsorted, duplicate-carrying) one.
        """
        return sorted({float(item) for item in bins_str.split(',')})

    def read_cmd_options(self):
        """Parse argv, validate, and publish settings to PrometheusExporterVars."""
        bins_str = ','.join(str(x) for x in self.DEFAULT_HIST_BINS)

        parser = argparse.ArgumentParser(description='Fieldstat prometheus exporter.')

        parser.add_argument("-p", "--listen-port", type=int, default=self.DEFAULT_LISTEN_PORT,
                            help="Specify the prometheus endpoint port to listen. i.e., 80,8080")
        parser.add_argument("-u", "--uri-path", type=str, default=self.DEFAULT_URI_PATH,
                            help="Specify the prometheus endpoint uri path.")
        parser.add_argument("-b", "--hist-bins", type=str, default=bins_str,
                            help="The metrics of histogram type output bins.")
        parser.add_argument("-f", "--hist-format", type=str, default=self.DEFAULT_HIST_FORMAT,
                            help="The metrics of histogram type output format.")
        parser.add_argument("-j", "--json-path", type=str, default=self.DEFAULT_JSON_PATH,
                            help="The input fieldstat metrics json file path.")

        args = parser.parse_args()

        if -1 == self.__verify_cmd_args(args):
            parser.print_help()
            sys.exit(1)

        self.listen_port = args.listen_port
        PrometheusExporterVars.uri_path = args.uri_path
        PrometheusExporterVars.hist_bins = self.__parse_bins_str(args.hist_bins)
        PrometheusExporterVars.hist_format = args.hist_format
        PrometheusExporterVars.json_path = args.json_path

    def enable_prometheus_endpoint(self):
        """Serve forever on all interfaces at the configured port."""
        httpd = HTTPServer(('', self.listen_port), PrometheusEndpoint)
        httpd.serve_forever()

    @classmethod
    def run_prometheus_endpoints(cls):
        exporter = cls()
        exporter.read_cmd_options()
        exporter.enable_prometheus_endpoint()
+
+
# Script entry point: parse CLI options and start the HTTP endpoint.
if __name__ == '__main__':
    PrometheusExporter.run_prometheus_endpoints()
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index cf40b3f..3252602 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -56,4 +56,5 @@ add_unit_test(test_metric_hll)
add_unit_test(test_performance)
add_unit_test(test_register_and_reset)
add_unit_test(test_serialize)
-add_unit_test(unit_test_cell_manager) \ No newline at end of file
+add_unit_test(unit_test_cell_manager)
+add_unit_test(test_exporter_python) \ No newline at end of file
diff --git a/test/test_exporter_python.cpp b/test/test_exporter_python.cpp
new file mode 100644
index 0000000..f81ae0b
--- /dev/null
+++ b/test/test_exporter_python.cpp
@@ -0,0 +1,159 @@
+#include <gtest/gtest.h>
+
+#include "utils.hpp"
+#include "cjson/cJSON.h"
+
+#include "fieldstat.h"
+#include "fieldstat_exporter.h"
+
/* Build a fieldstat instance with one shared tag (rule_id=1) and three HLL
 * fields ("external_ip", "internal_ip", "acc_ip"), each fed 100 distinct
 * IPv4-style strings so every sketch holds real cardinality.
 * NOTE(review): fieldstat_cube_add() runs once per HLL inside the loop, so
 * each field gets its own freshly added cell -- confirm this is intended
 * rather than one shared cell for all three fields. */
static struct fieldstat *get_hll_fieldsstat()
{
    struct fieldstat_tag shared_tags[1];

    shared_tags[0].key = "rule_id";
    shared_tags[0].type = TAG_INTEGER;
    shared_tags[0].value_longlong = 1;

    const char *hll_name[] = {"external_ip", "internal_ip", "acc_ip"};

    struct fieldstat *instance = fieldstat_new();
    EXPECT_NE(nullptr, instance);
    int cube_id = fieldstat_register_cube(instance, shared_tags, 1,
                                          SAMPLING_MODE_COMPREHENSIVE, 100);

    for(unsigned int i = 0; i < sizeof(hll_name) / sizeof(hll_name[0]); i++)
    {
        int hll_id = fieldstat_register_hll(instance, cube_id, hll_name[i], 5);
        int cell_id = fieldstat_cube_add(instance, cube_id, NULL, 0, 1000);

        /* Distinct addresses per field: 192.168.<field>.<0..99>. */
        for(int j = 0; j < 100; j++)
        {
            char ip_str[64] = {0};
            memset(ip_str, 0, sizeof(ip_str));
            snprintf(ip_str, sizeof(ip_str), "192.168.%u.%d", i, j);
            int ret = fieldstat_hll_add(instance, cube_id, hll_id, cell_id, ip_str, strlen(ip_str));
            EXPECT_EQ(0, ret);
        }

    }

    return instance;
}
+
+
/* Build a fieldstat instance with two shared tags and two cell tags, and
 * eight histogram fields; each histogram receives 100 samples in the range
 * [i*100, i*100+99] so their distributions differ per field.
 * NOTE(review): a new cell is added per histogram inside the loop -- confirm
 * this is intended rather than one shared cell. */
static struct fieldstat *get_hist_fieldstat()
{
    struct fieldstat_tag shared_tags[2];
    struct fieldstat_tag cell_tags[2];

    shared_tags[0].key = "rule_id";
    shared_tags[0].type = TAG_INTEGER;
    shared_tags[0].value_longlong = 1;
    shared_tags[1].key = "action";
    shared_tags[1].type = TAG_CSTRING;
    shared_tags[1].value_str = "deny";

    cell_tags[0].key = "thread_id";
    cell_tags[0].type = TAG_INTEGER;
    cell_tags[0].value_longlong = 1;
    cell_tags[1].key = "hit_rate";
    cell_tags[1].type = TAG_DOUBLE;
    cell_tags[1].value_double = 1.1;

    const char *hist_names[] = {"list_num", "max_wt_ms", "ivt_nx_itv_ms",
                                "bye_pv_itv_ms", "sess_num/udp", "ivt/udp",
                                "bye/udp", "oth_mtd/udp"};


    struct fieldstat *instance = fieldstat_new();
    EXPECT_NE(nullptr, instance);
    int cube_id = fieldstat_register_cube(instance, shared_tags, 2,
                                          SAMPLING_MODE_COMPREHENSIVE, 100);
    EXPECT_EQ(0, cube_id);

    for(unsigned int i = 0; i < sizeof(hist_names)/sizeof(hist_names[0]); i++)
    {
        /* Histogram range [1, 600000] with 3 significant digits -- assumed
         * to be the register_hist(lowest, highest, sigfigs) contract; TODO
         * confirm against fieldstat.h. */
        int hist_id = fieldstat_register_hist(instance, cube_id, hist_names[i], 1, 600000, 3);

        int cell_id = fieldstat_cube_add(instance, cube_id, cell_tags, 2, 1000);
        for(int j = 0; j < 100; j++)
        {
            fieldstat_hist_record(instance, cube_id, hist_id, cell_id, i*100 + j);
        }
    }

    return instance;
}
+
+
+
/* Build a fieldstat instance with two counters spread over one cell per
 * send_log tag value; the second counter is only incremented for the first
 * five tag values so the counters diverge. */
static struct fieldstat *get_table_fieldstat()
{
    struct fieldstat_tag shared_tags[2];
    shared_tags[0].key = "policy_id";
    shared_tags[0].type = TAG_INTEGER;
    shared_tags[0].value_longlong = 1;
    shared_tags[1].key = "quanlity";
    shared_tags[1].type = TAG_DOUBLE;
    shared_tags[1].value_double = 0.5;

    const char *cell_tag_value[] = {
        "sum", "SECURITY-EVENT", "SESSION-RECORD", "INTERNAL-RTP-RECORD",
        "VOIP-RECORD", "INTERIM-SESSION-RECORD", "TRANSACTION-RECORD",
        "GTPC-RECORD", "BGP-RECORD", "PROXY-EVENT", "DOS-SKETCH-RECORD",
        "STATISTICS-RULE-METRIC", "OBJECT-STATISTICS-METRIC"};

    /* value_str is overwritten per iteration below; "true" is a placeholder. */
    struct fieldstat_tag cell_tags;
    cell_tags.key = "send_log";
    cell_tags.type = TAG_CSTRING;
    cell_tags.value_str = "true";

    struct fieldstat *instance = fieldstat_new();
    EXPECT_NE(nullptr, instance);

    int cube_id = fieldstat_register_cube(instance, shared_tags, 2,
                                          SAMPLING_MODE_COMPREHENSIVE, 100);
    EXPECT_EQ(0, cube_id);

    int counter_id_0 = fieldstat_register_counter(instance, cube_id,
                                                  "T_success_log",
                                                  COUNTER_MERGE_BY_SUM);

    int counter_id_1 = fieldstat_register_counter(instance, cube_id,
                                                  "T_fail_log",
                                                  COUNTER_MERGE_BY_SUM);

    for(unsigned int i = 0; i < sizeof(cell_tag_value)/sizeof(cell_tag_value[0]); i++)
    {
        cell_tags.value_str = cell_tag_value[i];
        int cell_id_0 = fieldstat_cube_add(instance, cube_id, &cell_tags, 1, 1);
        fieldstat_counter_incrby(instance, cube_id, counter_id_0, cell_id_0, 1);
        if(i < 5)
            fieldstat_counter_incrby(instance, cube_id, counter_id_1, cell_id_0, 2);
    }

    return instance;
}
+
+TEST(ExporterLocal, TableBuild)
+{
+ struct timeval current = {100, 10000};
+ struct fieldstat *merger = fieldstat_new();
+ struct fieldstat *hll = get_hll_fieldsstat();
+ struct fieldstat *hist = get_hist_fieldstat();
+ struct fieldstat *table = get_table_fieldstat();
+
+ fieldstat_merge(merger, hll);
+ fieldstat_merge(merger, hist);
+ fieldstat_merge(merger, table);
+ struct fieldstat_json_exporter *exporter = fieldstat_json_exporter_new(hll);
+ char *str_json = fieldstat_json_exporter_export(exporter, &current);
+ printf(str_json);
+}
+
+
/* gtest entry point: run every registered test case. */
int main(int argc, char *argv[])
{
    testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
diff --git a/test/test_prometheus_exporter.py b/test/test_prometheus_exporter.py
new file mode 100644
index 0000000..82f6b2e
--- /dev/null
+++ b/test/test_prometheus_exporter.py
@@ -0,0 +1,100 @@
import argparse
import json
import os
import sys
import tempfile
import unittest
import urllib
from http.server import HTTPServer, BaseHTTPRequestHandler
from threading import Thread
from urllib.request import urlopen

# Make the exporter sources importable when running from test/.
sys.path.append('../src/exporter')
from prometheus_exporter import PrometheusExporterVars
from prometheus_exporter import BuildPrometheusMetrics
from prometheus_exporter import PrometheusEndpoint
from prometheus_exporter import PrometheusExporter
+
class TestBuildPrometheusMetrics(unittest.TestCase):
    """Unit tests for the JSON -> exposition-text translation."""

    def setUp(self):
        self.builder = BuildPrometheusMetrics()

    def test_escape_metric_name(self):
        # Everything outside the metric-name alphabet is squashed to '_'.
        metric_name = "tsg_master_log:(){}/\\%*$-,;"
        escaped = self.builder._BuildPrometheusMetrics__escape_metric_name(metric_name)
        self.assertEqual(escaped, "tsg_master_log:____________")

    def test_escape_metric_tags(self):
        # BUG FIX: __escape_metric_tags returns the comma-joined pairs
        # WITHOUT surrounding braces (the callers add them); the old
        # expectation included braces and could never pass.
        json_dict = {'name': '-', 'tags': {'send_log': 'sum', 'policy_id': 1, 'quanlity': 0.5},
                     'fields': {'T_success_log': 1}, 'timestamp': 1694657637836}
        tags = self.builder._BuildPrometheusMetrics__escape_metric_tags(json_dict)
        self.assertEqual(tags, 'send_log="sum",policy_id="1",quanlity="0.5",app_name="-"')

    def test_build_type_counter(self):
        # BUG FIX: the implementation method is __build_type_counter; the old
        # test referenced a non-existent __build_one_metric and passed tags
        # that already carried braces.
        one_metric = self.builder._BuildPrometheusMetrics__build_type_counter(
            "send_log", 'app_name="firewall"', 100)
        self.assertEqual(one_metric, 'send_log{app_name="firewall"} 100\n')

    def test_build_metrics_payload(self):
        # A missing file yields an empty payload.
        self.builder.json_path = "/nonexistent/fieldstat.json"
        self.assertEqual(self.builder.build_metrics_payload(), "")

        # BUG FIX: the old test depended on a pre-existing /tmp/t.json.
        # Generate a self-contained dump instead (integer fields only, so no
        # native-library calls are needed).
        with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
            json.dump([{"name": "-", "tags": {"policy_id": 1},
                        "fields": {"T_success_log": 1}, "timestamp": 0}], tmp)
            tmp_path = tmp.name
        try:
            self.builder.json_path = tmp_path
            self.assertNotEqual(self.builder.build_metrics_payload(), "")
        finally:
            os.remove(tmp_path)
+
+
class TestPrometheusEndpoint(unittest.TestCase):
    """Integration tests against a live HTTP server on a fixed local port."""

    @classmethod
    def setUpClass(cls):
        PrometheusExporterVars.uri_path = "/metrics"
        cls.httpd = HTTPServer(('', 40001), PrometheusEndpoint)
        # daemon=True so a failing test cannot leave the process hanging.
        cls.server_thread = Thread(target=cls.httpd.serve_forever, daemon=True)
        cls.server_thread.start()

    @classmethod
    def tearDownClass(cls):
        cls.httpd.shutdown()
        cls.httpd.server_close()
        cls.server_thread.join()

    def test_valid_request(self):
        response = urlopen('http://localhost:40001/metrics')
        self.assertEqual(response.getcode(), 200)

    def test_invalid_request(self):
        # BUG FIX: the old try/except silently passed when NO error was
        # raised; assertRaises makes the 404 mandatory.
        with self.assertRaises(urllib.error.HTTPError) as ctx:
            urlopen('http://localhost:40001/invalid')
        self.assertEqual(ctx.exception.code, 404)
+
+
class TestPrometheusExporter(unittest.TestCase):
    """Unit tests for command line parsing and validation."""

    def setUp(self):
        self.exporter = PrometheusExporter()

    def test_verify_cmd_args_valid(self):
        # BUG FIX: __verify_cmd_args takes the parsed args; the old test
        # called it with no argument and raised TypeError.
        args = argparse.Namespace(hist_format="summary",
                                  hist_bins="0.1,0.5,0.8,0.9,0.95,0.99")
        self.assertEqual(self.exporter._PrometheusExporter__verify_cmd_args(args), 0)

    def test_verify_cmd_args_invalid(self):
        # BUG FIX: __verify_cmd_args only validates histogram options -- the
        # old test tweaked listen_port, which it never checks.
        args = argparse.Namespace(hist_format="bogus", hist_bins="0.1")
        self.assertEqual(self.exporter._PrometheusExporter__verify_cmd_args(args), -1)

    def test_read_cmd_options(self):
        # Parse with a clean argv: unittest's own CLI flags would otherwise
        # confuse argparse.
        saved_argv = sys.argv
        sys.argv = ['prometheus_exporter.py']
        try:
            self.exporter.read_cmd_options()
        finally:
            sys.argv = saved_argv
        self.assertEqual(self.exporter.listen_port, 8080)
        self.assertEqual(PrometheusExporterVars.hist_format, "summary")
        # BUG FIX: hist_bins is parsed into a list of floats, not kept as the
        # raw option string the old assertion expected.
        self.assertEqual(PrometheusExporterVars.hist_bins,
                         [0.1, 0.5, 0.8, 0.9, 0.95, 0.99])
        self.assertEqual(PrometheusExporterVars.json_path, "./fieldstat.json")
        self.assertEqual(PrometheusExporterVars.uri_path, "/metrics")
+
+
# Test runner entry point.
if __name__ == '__main__':
    unittest.main()