summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorfumingwei <[email protected]>2023-09-19 15:43:33 +0800
committerfumingwei <[email protected]>2023-09-28 15:41:19 +0800
commita3a977aa0e07bc5947d5fa4d4dc18637bee2fee2 (patch)
treec0cb160fef35dc6faf70f253d5aef064d7eb9c9c
parent4fd06167f2f7a513d646c7698949430ff703ae82 (diff)
feature:合并prometheus和local exporter到fieldstat_exporter
-rw-r--r--src/exporter/fieldstat_exporter.py754
-rw-r--r--src/exporter/local_exporter.py349
-rw-r--r--src/exporter/prometheus_exporter.py245
-rw-r--r--test/test_exporter_python.cpp3
-rw-r--r--test/test_fieldstat_exporter.py893
-rw-r--r--test/test_prometheus_exporter.py100
6 files changed, 1649 insertions, 695 deletions
diff --git a/src/exporter/fieldstat_exporter.py b/src/exporter/fieldstat_exporter.py
new file mode 100644
index 0000000..1f1b16f
--- /dev/null
+++ b/src/exporter/fieldstat_exporter.py
@@ -0,0 +1,754 @@
+#!/usr/bin/python3
+
+import argparse
+import sys
+import json
+import re
+import os
+import logging
+import shutil
+import datetime
+import time
+import copy
+from prettytable import PrettyTable,NONE,HEADER
+from http.server import HTTPServer, BaseHTTPRequestHandler
+
+import ctypes
+
class FieldstatAPI:
    """ctypes bindings for the native libfieldstat4 shared library.

    Configured once at class-definition time and shared by every exporter.
    Handles returned by fieldstat_histogram_base64_decode are opaque void*
    values and must be released with fieldstat_histogram_free.
    """
    # NOTE(review): CDLL raises OSError at import time when libfieldstat4.so
    # is not on the loader search path — confirm that is the intended failure mode.
    libfieldstat = ctypes.CDLL('libfieldstat4.so')

    # base64 payload -> opaque histogram handle
    libfieldstat.fieldstat_histogram_base64_decode.argtypes = [ctypes.c_char_p]
    libfieldstat.fieldstat_histogram_base64_decode.restype = ctypes.c_void_p

    # releases a handle produced by fieldstat_histogram_base64_decode
    libfieldstat.fieldstat_histogram_free.argtypes = [ctypes.c_void_p]

    # (handle, percentile) -> recorded value at that percentile
    libfieldstat.fieldstat_histogram_value_at_percentile.argtypes = [ctypes.c_void_p, ctypes.c_double]
    libfieldstat.fieldstat_histogram_value_at_percentile.restype = ctypes.c_longlong

    # (handle, value) -> number of recorded samples <= value
    libfieldstat.fieldstat_histogram_count_le_value.argtypes = [ctypes.c_void_p, ctypes.c_longlong]
    libfieldstat.fieldstat_histogram_count_le_value.restype = ctypes.c_longlong

    # handle -> total number of recorded samples
    libfieldstat.fieldstat_histogram_value_total_count.argtypes = [ctypes.c_void_p]
    libfieldstat.fieldstat_histogram_value_total_count.restype = ctypes.c_longlong

    # handle -> minimum recorded value
    libfieldstat.fieldstat_histogram_value_min.argtypes = [ctypes.c_void_p]
    libfieldstat.fieldstat_histogram_value_min.restype = ctypes.c_longlong

    # handle -> maximum recorded value
    libfieldstat.fieldstat_histogram_value_max.argtypes = [ctypes.c_void_p]
    libfieldstat.fieldstat_histogram_value_max.restype = ctypes.c_longlong

    # handle -> mean of recorded values
    libfieldstat.fieldstat_histogram_value_mean.argtypes = [ctypes.c_void_p]
    libfieldstat.fieldstat_histogram_value_mean.restype = ctypes.c_double

    # handle -> standard deviation of recorded values
    libfieldstat.fieldstat_histogram_value_stddev.argtypes = [ctypes.c_void_p]
    libfieldstat.fieldstat_histogram_value_stddev.restype = ctypes.c_double

    # base64 HLL blob -> estimated distinct count
    libfieldstat.fieldstat_hll_base64_to_count.argtypes = [ctypes.c_char_p]
    libfieldstat.fieldstat_hll_base64_to_count.restype = ctypes.c_double

    # handle -> sum of recorded values
    libfieldstat.fieldstat_histogram_value_sum.argtypes = [ctypes.c_void_p]
    libfieldstat.fieldstat_histogram_value_sum.restype = ctypes.c_longlong

    # base64 payload -> nonzero when the blob is an HLL (vs. a histogram)
    libfieldstat.fieldstat_is_hll.argtypes = [ctypes.c_char_p]
    libfieldstat.fieldstat_is_hll.restype = ctypes.c_longlong
+
+logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')
+
+################################################################################
+# global variables
+################################################################################
class FieldstatExporterVars:
    """Process-wide settings shared between the CLI parser and the exporters."""
    # local exporter: which metric types to display (all three when none is set)
    local_display_counter = False
    local_display_hist = False
    local_display_hll = False
    # local exporter: only objects whose tags contain these key/value pairs
    local_match_tags = {}

    # prometheus exporter: endpoint URI path (e.g. "/metrics")
    prom_uri_path = ""

    # shared: input JSON file path, histogram output format and bins
    json_path = ""
    hist_format = ""
    hist_bins = []
+
+################################################################################
+# promethues exporter
+################################################################################
class PrometheusExporter:
    """Render the fieldstat JSON dump in the Prometheus text exposition format.

    Integer fields map to plain samples, base64 histogram blobs to summary or
    histogram series (per FieldstatExporterVars.hist_format), and HLL blobs to
    a single estimated-cardinality sample.
    """

    def __init__(self):
        self.hist_bins = FieldstatExporterVars.hist_bins
        self.hist_format = FieldstatExporterVars.hist_format
        self.json_path = FieldstatExporterVars.json_path
        self.n_lines = 0  # exposition lines emitted so far

    def __escape_metric_name(self, metric_name):
        # metric names are restricted to [a-zA-Z_:][a-zA-Z0-9_:]*
        name = re.sub(r'[^a-zA-Z0-9_:]', '_', metric_name)
        return name

    def __escape_metric_tags(self, json_dict):
        """Return the label list 'k="v",...' from the tags plus an app_name label."""
        # label names are restricted to [a-zA-Z_][a-zA-Z0-9_]*
        dst_tags = []
        # bug fix: work on a copy — the original injected "app_name" into the
        # caller's json_dict["tags"] as a side effect
        src_tags = dict(json_dict["tags"])
        src_tags["app_name"] = json_dict["name"]

        for key, value in src_tags.items():
            dst_key = re.sub(r'[^a-zA-Z0-9_:]', '_', key)
            dst_val = str(value)
            dst_tags.append(f'{dst_key}="{dst_val}"')

        return ','.join(dst_tags)

    def __build_type_counter(self, name, tags, value):
        """One sample line for an integer counter field."""
        metric = name + "{" + tags + "}" + ' ' + str(value) + '\n'
        self.n_lines += 1
        return metric

    def __build_histogram_format(self, name, tags, c_hist):
        """Cumulative bucket lines (<name>_bucket{le="x"}) for each configured bin."""
        metrics = ""
        for i in self.hist_bins:
            value = FieldstatAPI.libfieldstat.fieldstat_histogram_count_le_value(c_hist, int(i))
            metric = name + "_bucket" + "{" + tags + ",le=\"{:.2f}\"".format(i) + "}" + ' ' + str(value) + '\n'
            metrics += metric
            self.n_lines += 1

        return metrics

    def __build_summary_format(self, name, tags, c_hist):
        """Quantile lines ({quantile="x%"}) for each configured bin."""
        metrics = ""
        for i in self.hist_bins:
            value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_at_percentile(c_hist, float(i * 100))
            metric = name + "{" + tags + ",quantile=\"{:.2f}%\"".format(i * 100) + "}" + ' ' + str(value) + '\n'
            metrics += metric
            self.n_lines += 1

        return metrics

    def __build_type_histogram(self, name, tags, value):
        """Decode a base64 histogram and emit it as summary or histogram series."""
        metrics = ""
        c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(value.encode('utf-8'))

        if self.hist_format == "summary":
            metrics += self.__build_summary_format(name, tags, c_hist)
        if self.hist_format == "histogram":
            metrics += self.__build_histogram_format(name, tags, c_hist)

        sum_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_sum(c_hist)
        metrics += name + "_sum" + "{" + tags + "}" + ' ' + str(sum_value) + '\n'
        self.n_lines += 1

        cnt_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_total_count(c_hist)
        metrics += name + "_count" + "{" + tags + "}" + ' ' + str(cnt_value) + '\n'
        self.n_lines += 1

        # release the native handle allocated by base64_decode
        FieldstatAPI.libfieldstat.fieldstat_histogram_free(c_hist)

        return metrics

    def __build_type_hll(self, name, tags, value):
        """One sample line with the estimated cardinality of an HLL blob."""
        hll_value = FieldstatAPI.libfieldstat.fieldstat_hll_base64_to_count(value.encode('utf-8'))
        self.n_lines += 1
        return name + "{" + tags + "}" + ' ' + "{:.2f}".format(hll_value) + '\n'

    def __build_metrics(self, json_dict):
        """Emit every field of one JSON object; *_delta counters are skipped."""
        metrics = ""
        escaped_tags = self.__escape_metric_tags(json_dict)
        for key, value in json_dict["fields"].items():
            escaped_name = self.__escape_metric_name(key)
            if isinstance(value, int):
                if key.endswith("_delta"):
                    # delta fields only feed the local exporter's speed column
                    continue
                metrics += self.__build_type_counter(escaped_name, escaped_tags, value)
            else:
                is_hll = FieldstatAPI.libfieldstat.fieldstat_is_hll(value.encode('utf-8'))
                if is_hll:
                    metrics += self.__build_type_hll(escaped_name, escaped_tags, value)
                else:
                    metrics += self.__build_type_histogram(escaped_name, escaped_tags, value)
        return metrics

    def build_metrics_payload(self):
        """Read the JSON file and return the full exposition payload ('' if missing)."""
        payload = ""

        if not os.path.exists(self.json_path):
            logging.error("Path: {%s} does not exist", self.json_path)
            return payload

        with open(self.json_path) as file:
            json_data = json.load(file)
            for item in json_data:
                payload += self.__build_metrics(item)

        return payload

    def read_lines_num(self):
        """Number of exposition lines produced so far."""
        return self.n_lines

    @classmethod
    def run_prometheus_exporter(cls):
        """Build one payload with a fresh builder (called once per HTTP request)."""
        builder = cls()
        return builder.build_metrics_payload()
+
class PrometheusEndpoint(BaseHTTPRequestHandler):
    """HTTP handler that serves the metrics payload on the configured URI path."""

    def __init__(self, request, client_address, server):
        # Capture the configured path before the base class starts handling
        # the request (the base __init__ dispatches do_GET itself).
        self.desired_path = FieldstatExporterVars.prom_uri_path
        super().__init__(request, client_address, server)

    def do_GET(self):
        matched = self.path == self.desired_path
        if matched:
            status, body = 200, PrometheusExporter.run_prometheus_exporter()
        else:
            status, body = 404, "uri path:" + self.desired_path

        self.send_response(status)
        if matched:
            self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(body.encode())
+
+
+
+################################################################################
+# local exporter
+################################################################################
class CounterTable:
    """Terminal table for counter/HLL metrics (used by the local exporter).

    Two layouts are supported:
      * column mode — add_table_column(): one column per metric with
        tags/sum/speed rows, split across tables to fit the terminal width;
      * row mode — add_field_names()/append_field_rows(): one row per tag
        value when all objects share one free tag and the same field keys.
    """

    INFO_COLUMN_WIDTH = 11  # len(speed/s) + 2 * (" ") + 2 * ("|")
    COLUMM_PADDING = 3      # 2 *(" ") + "|"

    def __init__(self):
        self.terminal_size, _ = shutil.get_terminal_size((128, 64))
        self.column_size = self.terminal_size - self.INFO_COLUMN_WIDTH
        self.info_column = ("", ["tags", "sum", "speed/s"])
        self.min_width = self.INFO_COLUMN_WIDTH
        self.tables = []
        self.columns = []
        self.field_names = []
        self.rows = []  # bug fix: was initialized twice in the original

    def add_field_names(self, fields):
        """Set the row-mode header once: an empty tag column plus sorted field keys."""
        if len(self.field_names) == 0:
            sorted_keys = sorted(fields.keys())
            self.field_names.append("")
            self.field_names.extend(sorted_keys)

    def append_field_rows(self, tags, match_tags, fields):
        """Append one row labeled by the single tag not covered by match_tags.

        Silently skipped unless exactly one such tag exists (row-mode invariant).
        """
        row = []
        new_tags = {}

        # row mode requires exactly one free (non-filtered) tag
        if len(tags) - len(match_tags) != 1:
            return

        for key in tags:
            if key not in match_tags:
                new_tags[key] = tags[key]
                break

        for key, value in new_tags.items():
            row.append("%s_%s" % (key, str(value)))

        # field values in the same sorted order as the header
        sorted_keys = sorted(fields.keys())
        for key in sorted_keys:
            row.append(fields[key])

        self.rows.append(row)

    def add_table_column(self, tags, head, value, speed_s):
        """Append one column (tags / sum / speed) and widen min_width as needed."""
        column = (head, [tags, str(value), "{:.2f}".format(speed_s)])
        self.columns.append(column)
        self.min_width = max(self.min_width, len(tags), len(head), len(str(value)))

    def __build_one_table(self, columns_slice):
        """One PrettyTable holding the info column plus a slice of data columns."""
        table = PrettyTable()
        table.vrules = NONE
        table.hrules = NONE
        # the info column labels the rows on the left
        table.add_column(self.info_column[0], self.info_column[1], align="l")
        for item in columns_slice:
            table.add_column(item[0], item[1], align="r")
            # every data column shares the widest observed width
            table.min_width[item[0]] = self.min_width

        return table

    def __build_columns_tables(self):
        """Split the column list into as many tables as the terminal width needs."""
        n_columns = len(self.columns)
        if n_columns == 0:
            return

        # how many data columns fit beside the info column (at least one)
        table_size = self.column_size // (self.min_width + self.COLUMM_PADDING)
        if 0 == table_size:
            table_size = 1

        for i in range(0, n_columns, table_size):
            # slicing already clamps the right edge, so no end-of-list special case
            self.tables.append(self.__build_one_table(self.columns[i:i + table_size]))

    def __build_rows_tables(self):
        """Build the single row-mode table, if any rows were collected."""
        if len(self.field_names) == 0 or len(self.rows) == 0:
            return
        table = PrettyTable()
        table.vrules = NONE
        table.hrules = NONE
        table.field_names = self.field_names

        for item in self.field_names:
            table.align[item] = "r"
        table.align[""] = "l"  # the tag column stays left-aligned

        for row in self.rows:
            table.add_row(row)

        self.tables.append(table)

    def read_columns_num(self):
        """Number of collected column-mode columns."""
        return len(self.columns)

    def read_rows_num(self):
        """Number of collected row-mode rows."""
        return len(self.rows)

    def read_tables_num(self):
        """Number of built tables (populated by print_tables())."""
        return len(self.tables)

    def print_tables(self):
        """Build both layouts and print every resulting table."""
        self.__build_columns_tables()
        self.__build_rows_tables()
        for item in self.tables:
            print(item)
+
+
class HistogramTable:
    """One PrettyTable per histogram metric for the local exporter."""

    def __init__(self):
        self.format = FieldstatExporterVars.hist_format
        self.bins = FieldstatExporterVars.hist_bins
        self.tables = []

    def __build_summary_format(self, c_hist, table):
        # quantile columns: recorded value at each configured percentile
        for i in self.bins:
            head = "{:.2f}%".format(i * 100)
            row = FieldstatAPI.libfieldstat.fieldstat_histogram_value_at_percentile(c_hist, float(i * 100))
            table.add_column(head, [row])

    def __build_histogram_format(self, c_hist, table):
        # bucket columns: number of samples <= bin, matching the "le=" header.
        # bug fix: the original queried value_at_percentile(int(i)), which
        # treats the bin as a percentile; PrometheusExporter uses
        # count_le_value for the same le-bucket semantics.
        for i in self.bins:
            head = "le={:d}".format(int(i))
            row = FieldstatAPI.libfieldstat.fieldstat_histogram_count_le_value(c_hist, int(i))
            table.add_column(head, [row])

    def build_table(self, tags, key, value):
        """Decode a base64 histogram and build its one-row stats table."""
        table = PrettyTable()
        c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(value.encode('utf-8'))
        if self.format == "summary":
            self.__build_summary_format(c_hist, table)
        if self.format == "histogram":
            self.__build_histogram_format(c_hist, table)

        max_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_max(c_hist)
        table.add_column("MAX", [max_value])

        min_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_min(c_hist)
        table.add_column("MIN", [min_value])

        avg_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_mean(c_hist)
        table.add_column("AVG", ["{:.2f}".format(avg_value)])

        dev_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_stddev(c_hist)
        table.add_column("STDDEV", ["{:.2f}".format(dev_value)])

        cnt_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_total_count(c_hist)
        table.add_column("CNT", [cnt_value])

        # release the native handle allocated by base64_decode
        FieldstatAPI.libfieldstat.fieldstat_histogram_free(c_hist)

        table.title = key + " " + tags
        self.tables.append(table)

    def read_tables_num(self):
        """Number of built histogram tables."""
        return len(self.tables)

    def print_tables(self):
        """Print every collected histogram table."""
        for item in self.tables:
            print(item)
+
+
class LocalExporter:
    """Render the fieldstat JSON dump as terminal tables (counter/hist/hll)."""

    def __init__(self):
        self.terminal_size, _ = shutil.get_terminal_size((128, 64))
        self.json_path = FieldstatExporterVars.json_path
        self.ctable = CounterTable()    # counter metrics
        self.htable = HistogramTable()  # histogram metrics
        self.hlltable = CounterTable()  # hyperloglog metrics (reuses the counter layout)
        self.display_counter = FieldstatExporterVars.local_display_counter
        self.display_hist = FieldstatExporterVars.local_display_hist
        self.display_hll = FieldstatExporterVars.local_display_hll
        self.match_tags = FieldstatExporterVars.local_match_tags
        # set by parse_data(): True when counters can be shown as one row-table
        self.is_counter_table = False
        self.__set_default_display()

    def __set_default_display(self):
        # default: print all metric types when none was explicitly requested
        if not (self.display_counter or self.display_hist or self.display_hll):
            self.display_counter = True
            self.display_hist = True
            self.display_hll = True

    def __dealwith_counter(self, tags, key, value, speed_s):
        # counter field -> one column (tags / sum / speed) in the counter table
        self.ctable.add_table_column(tags, key, value, speed_s)

    def __dealwith_histogram(self, tags, key, value):
        # base64 histogram field -> its own PrettyTable
        self.htable.build_table(tags, key, value)

    def __dealwith_hll(self, tags, key, value):
        # HLL blob -> estimated cardinality column; speed is not meaningful (0)
        hll_value = FieldstatAPI.libfieldstat.fieldstat_hll_base64_to_count(value.encode('utf-8'))
        self.hlltable.add_table_column(tags, key, "{:.2f}".format(hll_value), 0)

    def __parse_json_tags(self, json_object):
        # Deep-copy so injecting "app_name" does not mutate the caller's object.
        tags_dict = copy.deepcopy(json_object["tags"])
        tags_dict.update({"app_name": json_object["name"]})
        return json.dumps(tags_dict)

    def __get_counter_speed_value(self, key, fields, json_object):
        """Per-second rate derived from the companion <key>_delta field (0 if absent)."""
        delta_key = key + "_delta"
        if delta_key in fields:
            delta_val = fields[delta_key]
            delta_ms = json_object["timestamp_ms"] - json_object["timestamp_ms_delta"]
            # NOTE(review): raises ZeroDivisionError when both timestamps are
            # equal — confirm the producer guarantees a non-zero interval.
            speed_s = delta_val * 1000 / delta_ms
            return speed_s
        return 0

    def __match_tags(self, tags):
        """True when `tags` contains every key/value pair of the --match-tags filter."""
        if len(self.match_tags) == 0:
            return True  # no filter configured: everything matches

        if len(tags) == 0:
            return False

        if len(self.match_tags) > len(tags):
            return False

        for key, value in self.match_tags.items():
            if key not in tags:
                return False
            if value != tags[key]:
                return False

        return True

    #table: one same tags + same fields keys
    def __is_counter_table(self, json_list):
        """Decide whether counters can be rendered as a single row-table.

        Requires every matching object to be counter-only (no string fields),
        to carry exactly one tag besides the filtered ones, and to share the
        same tag and field key sets across objects.
        """
        is_first_elem = True
        prev_tags = {}
        prev_fields = {}

        #only one counter type json object, counter type print normal mode
        if len(json_list) <= 1:
            return False

        for json_object in json_list:
            tags = copy.deepcopy(json_object["tags"])

            if not self.__match_tags(tags):
                continue  # filtered-out objects do not influence the decision

            for _, value in json_object["fields"].items():
                if isinstance(value, str):
                    return False  # histogram/HLL present: not a pure counter table

            # ignore the filtered tags when counting the remaining ones
            for key in self.match_tags:
                tags.pop(key, None)

            #fields no one tags. print normal mode.
            if len(tags) != 1:
                return False

            if is_first_elem:
                prev_tags = tags
                prev_fields = json_object["fields"]
                is_first_elem = False
                continue
            else:
                if prev_tags.keys() == tags.keys() and \
                    prev_fields.keys() == json_object["fields"].keys():
                    continue
                else:
                    return False

        return True

    def __parse_json_object(self, json_object):
        """Dispatch one JSON object's fields to the matching table builders."""
        tags = self.__parse_json_tags(json_object)
        fields = json_object["fields"]

        if not self.__match_tags(json_object["tags"]):
            return

        for key,value in fields.items():
            if not isinstance(value, str):
                if key.endswith("_delta"):
                    continue  # *_delta only feeds the speed column, never shown directly
                speed_s = self.__get_counter_speed_value(key, fields, json_object)
                if self.is_counter_table:
                    # NOTE(review): this branch runs once per counter field, so
                    # objects with several counter fields append duplicate rows —
                    # confirm row-mode inputs always carry one counter field.
                    self.ctable.add_field_names(fields)
                    self.ctable.append_field_rows(json_object["tags"], self.match_tags, fields)
                    continue
                else:
                    self.__dealwith_counter(tags, key, value, speed_s)
            else:
                is_hll = FieldstatAPI.libfieldstat.fieldstat_is_hll(value.encode('utf-8'))
                if is_hll:
                    self.__dealwith_hll(tags, key, value)
                else:
                    self.__dealwith_histogram(tags, key, value)

    def parse_data(self):
        """Load the JSON file and feed every object into the table builders."""
        if not os.path.exists(self.json_path):
            logging.error("Path: {%s} does not exist", self.json_path)
            return
        with open(self.json_path) as file:
            data = json.load(file)
            self.is_counter_table = self.__is_counter_table(data)
            for json_object in data:
                self.__parse_json_object(json_object)

    def __print_top_edge(self):
        # Header line: current time centered in a row of '=' padding.
        timestamp = datetime.datetime.now().timestamp()
        formatted_time = datetime.datetime.fromtimestamp(timestamp).strftime('%a %b %d %H:%M:%S %Y')

        num_of_equals = (self.terminal_size - len(formatted_time)) // 2

        result = '=' * num_of_equals + formatted_time + '=' * num_of_equals
        print(result)

    def __print_bottom_edge(self):
        # Footer: full-width rule of '-' characters.
        print('-' * self.terminal_size)

    def print_data(self):
        """Print the header, the enabled table groups, and the footer."""
        self.__print_top_edge()

        if self.display_counter:
            self.ctable.print_tables()

        if self.display_hist:
            self.htable.print_tables()

        if self.display_hll:
            self.hlltable.print_tables()

        self.__print_bottom_edge()

    @classmethod
    def run_local_exporter(cls):
        """One full parse-and-print pass (called once per refresh interval)."""
        praser = cls()
        praser.parse_data()
        praser.print_data()
+
+
+
+################################################################################
+# fieldstat exporter
+################################################################################
class FieldstatExporter:
    """Command-line front end: parse arguments, then run the chosen exporter.

    Sub-commands:
      * prometheus — serve the metrics payload over HTTP;
      * local      — print tables to the terminal, optionally in a loop.
    """

    DEFAULT_LISTEN_PORT = 8080
    DEFAULT_HIST_BINS = [0.1,0.5,0.8,0.9,0.95,0.99]
    DEFAULT_HIST_FORMAT = "summary"
    DEFAULT_JSON_PATH = "./fieldstat.json"
    DEFAULT_URI_PATH = "/metrics"
    DEFAULT_INTERVAL_S = 1

    def __init__(self):
        self.local_clear_screen = False
        self.local_enable_loop = False
        # consistency fix: use the named default instead of a hard-coded 1
        self.local_interval_s = self.DEFAULT_INTERVAL_S
        self.prom_listen_port = self.DEFAULT_LISTEN_PORT
        self.exporter_mode = "local"

    def __build_shared_args_parser(self):
        """Options common to both sub-commands (bins, format, json path)."""
        bins_str = ','.join([str(x) for x in self.DEFAULT_HIST_BINS])

        parser = argparse.ArgumentParser(add_help=False)
        parser.add_argument("-b", "--hist-bins", type = str, default = bins_str,
                help = "The metrics of histogram type output bins.")
        parser.add_argument("-f", "--hist-format", type = str, default = self.DEFAULT_HIST_FORMAT,
                help = "The metrics of histogram type output format.")
        parser.add_argument("-j", "--json-path", type = str, default = self.DEFAULT_JSON_PATH,
                help = "The input fieldstat metrics json file path.")
        return parser

    def __build_prom_parser(self, subparsers, shared_arg_parser):
        """Register the 'prometheus' sub-command."""
        parser = subparsers.add_parser('prometheus', help='Set prometheus exporter', parents=[shared_arg_parser])
        parser.add_argument("-p", "--listen-port", type = int, default = self.DEFAULT_LISTEN_PORT,
                help = "Specify the prometheus endpoint port to listen. i.e., 80,8080")
        parser.add_argument("-u", "--uri-path", type = str, default = self.DEFAULT_URI_PATH,
                help = "Specify the prometheus endpoint uri path.")

    def __build_local_parser(self, subparsers, shared_arg_parser):
        """Register the 'local' sub-command."""
        parser = subparsers.add_parser('local', help='Set local exporter', parents=[shared_arg_parser])
        parser.add_argument("-i", "--interval", type = int, default = self.DEFAULT_INTERVAL_S,
                help = "interval, seconds to wait between print.")
        parser.add_argument("-l", "--loop", action = 'store_true', default = False,
                help = "print loop, exit when recv a signal.")
        parser.add_argument('--clear-screen', action = 'store_true', default = False,
                help = 'clear screen at start of loop')
        parser.add_argument('--display-hll' , action = 'store_true', default = False,
                help = 'Display hyperloglog type metrics')
        parser.add_argument('--display-hist', action = 'store_true', default = False,
                help = 'Display histogram type metrics')
        parser.add_argument('--display-counter', action = 'store_true', default = False,
                help = 'Display counter type metrics')
        parser.add_argument("-m", "--match-tags", type = str, default = "",
                help = "Display the tags match metrics")

    def __parse_bins_str(self, bins_str):
        """Parse '0.1,0.5,...' into a sorted, de-duplicated list of floats."""
        bins = []
        for item in bins_str.split(','):
            bins.append(float(item))
        bins_sort = list(set(bins))
        bins_sort.sort()
        return bins_sort

    def __parse_tags_str(self, tags_str):
        """Parse 'k1:v1,k2:v2' into a dict, coercing numeric-looking values."""
        tags_dict = {}
        if not tags_str:
            return tags_dict
        pairs = tags_str.split(',')
        for pair in pairs:
            key, value = pair.split(':')
            temp_value = value.strip()
            if temp_value.isdigit():
                tags_dict[key] = int(temp_value)
            elif re.match(r'^[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?$', temp_value):
                tags_dict[key] = float(temp_value)
            else:
                tags_dict[key] = temp_value

        return tags_dict

    def __read_shared_args_value(self, args):
        """Publish the shared options into FieldstatExporterVars."""
        FieldstatExporterVars.hist_format = args.hist_format
        FieldstatExporterVars.json_path = args.json_path
        FieldstatExporterVars.hist_bins = self.__parse_bins_str(args.hist_bins)

    def __read_private_args_value(self, args):
        """Publish the per-sub-command options and record the exporter mode."""
        if args.command == 'prometheus':
            FieldstatExporterVars.prom_uri_path = args.uri_path
            self.exporter_mode = 'prometheus'
            self.prom_listen_port = args.listen_port

        if args.command == 'local':
            FieldstatExporterVars.local_display_counter = args.display_counter
            FieldstatExporterVars.local_display_hll = args.display_hll
            FieldstatExporterVars.local_display_hist = args.display_hist
            FieldstatExporterVars.local_match_tags = self.__parse_tags_str(args.match_tags)
            self.exporter_mode = 'local'
            self.local_interval_s = args.interval
            self.local_enable_loop = args.loop
            self.local_clear_screen = args.clear_screen

    def __verify_cmd_args(self, args):
        """Validate parsed arguments; returns 0 on success, -1 on error (logged)."""
        if args.hist_format not in ["summary", "histogram"]:
            # message typo fixes: "Historgram" -> "Histogram"
            logging.error("Histogram format must be 'summary' or 'histogram'")
            return -1

        bins = args.hist_bins.split(',')
        if args.hist_format == "summary":
            # summary bins are percentiles expressed as 0.01 .. 0.99
            for bin in bins:
                if not re.match(r'^0\.([1-9]|[0][1-9]|[1-9]\d)$', bin):
                    # example fix: "0.1,0,2,0.3" -> "0.1,0.2,0.3"
                    logging.error("When histogram format is %s, bins value in [0.01-0.99], "
                                  "bins format example: 0.1,0.2,0.3", args.hist_format)
                    return -1
        if args.hist_format == "histogram":
            # histogram bins are non-negative integer bucket bounds
            for bin in bins:
                if not re.match(r'^\d+$', bin):
                    logging.error("When histogram format is %s, bins value is integer, "
                                  "bins format example: 1,10,100,1000", args.hist_format)
                    return -1

        if args.command == 'prometheus':
            if args.listen_port < 1024 or args.listen_port > 65535:
                logging.error("invalid listen port, listen port must be in [1024, 65535]")
                return -1

        if args.command == 'local':
            if args.match_tags != "":
                pattern = r'^([A-Za-z0-9_-]+:[A-Za-z0-9_\-\.]+,)*([A-Za-z0-9_-]+:[A-Za-z0-9_\-\.]+)$'
                match = re.match(pattern, args.match_tags)
                if not match:
                    logging.error("invalid match tags, Example: key1:value1,key2:value2,key3:value3")
                    return -1

        return 0

    def read_cmd_options(self):
        """Parse argv, validate, and publish all settings (exits on bad input)."""
        parser = argparse.ArgumentParser(description='Fieldstat exporter')
        shared_args_parser = self.__build_shared_args_parser()
        subparsers = parser.add_subparsers(dest='command')
        subparsers.required = True
        self.__build_prom_parser(subparsers, shared_args_parser)
        self.__build_local_parser(subparsers, shared_args_parser)

        args = parser.parse_args()

        if -1 == self.__verify_cmd_args(args):
            parser.print_help()
            sys.exit(1)

        self.__read_shared_args_value(args)
        self.__read_private_args_value(args)

    def __enable_prometheus_endpoint(self):
        """Serve the metrics endpoint forever on all interfaces."""
        server_address = ('', self.prom_listen_port)
        httpd = HTTPServer(server_address, PrometheusEndpoint)
        httpd.serve_forever()

    def __enable_local_exporter(self):
        """Print once, or repeatedly at the configured interval with --loop."""
        while True:
            if self.local_clear_screen:
                os.system('clear')
            LocalExporter.run_local_exporter()

            if not self.local_enable_loop:
                break
            time.sleep(self.local_interval_s)

    def fieldstat_export(self):
        """Run the selected exporter until done or interrupted (Ctrl-C is quiet)."""
        try:
            if self.exporter_mode == 'prometheus':
                self.__enable_prometheus_endpoint()

            if self.exporter_mode == 'local':
                self.__enable_local_exporter()

        except KeyboardInterrupt:
            pass

    @classmethod
    def run_fieldstat_exporter(cls):
        """Entry point: parse the command line, then export."""
        exporter = cls()
        exporter.read_cmd_options()
        exporter.fieldstat_export()
+
+
if __name__ == '__main__':
    # CLI entry point: parse arguments, then run the selected exporter mode.
    FieldstatExporter.run_fieldstat_exporter()
diff --git a/src/exporter/local_exporter.py b/src/exporter/local_exporter.py
deleted file mode 100644
index 7226082..0000000
--- a/src/exporter/local_exporter.py
+++ /dev/null
@@ -1,349 +0,0 @@
-#!/usr/bin/python3
-
-import argparse
-import sys
-import json
-import re
-import os
-import logging
-import shutil
-import datetime
-import time
-from prettytable import PrettyTable,NONE,HEADER
-
-import ctypes
-
-libfieldstat = ctypes.CDLL('libfieldstat4.so')
-libfieldstat.histogram_base64_decode.argtypes = [ctypes.c_char_p]
-libfieldstat.histogram_base64_decode.restype = ctypes.c_void_p
-
-libfieldstat.histogram_free.argtypes = [ctypes.c_void_p]
-
-libfieldstat.histogram_value_at_percentile.argtypes = [ctypes.c_void_p, ctypes.c_double]
-libfieldstat.histogram_value_at_percentile.restype = ctypes.c_longlong
-
-libfieldstat.histogram_count_le_value.argtypes = [ctypes.c_void_p, ctypes.c_longlong]
-libfieldstat.histogram_count_le_value.restype = ctypes.c_longlong
-
-libfieldstat.histogram_value_total_count.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_total_count.restype = ctypes.c_longlong
-
-libfieldstat.histogram_value_min.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_min.restype = ctypes.c_longlong
-
-libfieldstat.histogram_value_max.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_max.restype = ctypes.c_longlong
-
-libfieldstat.histogram_value_mean.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_mean.restype = ctypes.c_double
-
-libfieldstat.histogram_value_stddev.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_stddev.restype = ctypes.c_double
-
-libfieldstat.hll_base64_to_count.argtypes = [ctypes.c_char_p]
-libfieldstat.hll_base64_to_count.restype = ctypes.c_double
-
-logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')
-
-'''
-[
- {
- "name": "-",
- "tags": {
- "send_log": "sum",
- "policy_id": 1,
- "quanlity": 0.500000
- },
- "fields": {
- "T_success_log": 1
- },
- "timestamp": 1694657637836
- },
- {
- "name": "-",
- "tags": {
- "send_log": "SECURITY-EVENT",
- "policy_id": 1,
- "quanlity": 0.500000
- },
- "fields": {
- "T_success_log": 1
- },
- "timestamp": 1694657637836
- }
-]
-'''
-
-class LocalExporterVars:
- terminal_size = 1
- json_path = ""
- hist_format = ""
- hist_bins = []
-
-class CounterTable:
- INFO_COLUMN_WIDTH = 11 # len(speed/s) + 2 * (" ") + 2 * ("|")
- COLUMM_PADDING = 3 # 2 *(" ") + "|"
-
- def __init__(self):
- self.column_size = LocalExporterVars.terminal_size - self.INFO_COLUMN_WIDTH
- self.info_column = ("", ["tags", "sum", "speed/s"])
- self.min_width = self.INFO_COLUMN_WIDTH
- self.tables = []
- self.columns = []
-
- def build_table_column(self, tags, key, value):
- column = (key, [tags, str(value), ""])
- self.columns.append(column)
- self.min_width = max(self.min_width, len(tags), len(key), len(str(value)))
-
- def __build_one_table(self, columns_slice):
- table = PrettyTable()
- table.vrules = NONE
- table.hrules = NONE
- # append info column into table
- table.add_column(self.info_column[0], self.info_column[1], align = "l" )
- for item in columns_slice:
- # append data column into table
- table.add_column(item[0], item[1], align="r")
- # parameters max length
- table.min_width[item[0]] = self.min_width
-
- return table
-
- def __build_tables(self):
- # One table print in screen size
- # One table per screen size
- table_size = self.column_size // (self.min_width + self.COLUMM_PADDING)
- if 0 == table_size:
- table_size = 1
- n_columns = len(self.columns)
-
- for i in range(0, n_columns, table_size):
- table = None
- l_edge = i
- r_edge = min(i + table_size, n_columns)
-
- if r_edge >= n_columns:
- table = self.__build_one_table(self.columns[l_edge:])
- else:
- table = self.__build_one_table(self.columns[l_edge:r_edge])
-
- self.tables.append(table)
-
- def print_tables(self):
- self.__build_tables()
- for item in self.tables:
- print(item)
-
-
-class HistogramTable:
- def __init__(self):
- self.format = LocalExporterVars.hist_format
- self.bins = LocalExporterVars.hist_bins
- self.tables = []
-
- def __build_summary_format(self, c_hist, table):
- for i in LocalExporterVars.hist_bins:
- header = "{:.2f}%".format(i * 100)
- row = libfieldstat.histogram_value_at_percentile(c_hist, float(i * 100))
- table.add_column(header, [row])
-
- def __build_histogram_format(self, c_hist, table):
- for i in LocalExporterVars.hist_bins:
- header = "le={:d}".format(i)
- row = libfieldstat.histogram_value_at_percentile(c_hist, int(i))
- table.add_column(header, [row])
-
-
- def build_table(self, tags, key, value):
- table = PrettyTable()
- c_hist = libfieldstat.histogram_base64_decode(value.encode('utf-8'))
- if self.format == "summary":
- self.__build_summary_format(c_hist, table)
- if self.format == "histogram":
- self.__build_histogram_format(c_hist, table)
-
- max_value = libfieldstat.histogram_value_max(c_hist)
- table.add_column("MAX", [max_value])
-
- min_value = libfieldstat.histogram_value_min(c_hist)
- table.add_column("MIN", [min_value])
-
- avg_value = libfieldstat.histogram_value_mean(c_hist)
- table.add_column("AVG", ["{:.2f}".format(avg_value)])
-
- dev_value = libfieldstat.histogram_value_stddev(c_hist)
- table.add_column("STDDEV", ["{:.2f}".format(dev_value)])
-
- cnt_value = libfieldstat.histogram_value_total_count(c_hist)
- table.add_column("CNT", [cnt_value])
-
- libfieldstat.histogram_free(c_hist)
-
- table.title = key + " " + tags
- self.tables.append(table)
-
- def print_tables(self):
- for item in self.tables:
- print(item)
-
-class JsonDataParser:
- def __init__(self):
- self.json_path = LocalExporterVars.json_path
- self.ctable = CounterTable()
- self.htable = HistogramTable()
- self.hlltable = CounterTable()
-
- def __dealwith_counter(self, tags, key, value):
- self.ctable.build_table_column(tags, key, value)
-
- def __dealwith_histogram(self, tags, key, value):
- self.htable.build_table(tags, key, value)
-
- def __dealwith_hll(self, tags, key, value):
- hll_value = libfieldstat.hll_base64_to_count(value.encode('utf-8'))
- self.hlltable.build_table_column(tags, key, "{:.2f}".format(hll_value))
-
- def __parse_json_tags(self, json_object):
- tags_dict = json_object["tags"]
- tags_dict.update({"app_name": json_object["name"]})
- return json.dumps(tags_dict)
-
- def __parse_json_object(self, json_object):
- tags = self.__parse_json_tags(json_object)
- fields = json_object["fields"]
-
- for key,value in fields.items():
- if isinstance(value, int):
- self.__dealwith_counter(tags, key, value)
- else:
- is_hll = libfieldstat.is_hll(value.encode('utf-8'))
- if is_hll:
- self.__dealwith_hll(tags, key, value)
- else:
- self.__dealwith_histogram(tags, key, value)
-
- def parse_data(self):
- if not os.path.exists(self.json_path):
- logging.error("Path: {%s} does not exist", self.json_path)
- return
- with open(self.json_path) as file:
- data = json.load(file)
- for json_object in data:
- self.__parse_json_object(json_object)
-
- def __print_top_edge(self):
- timestamp = datetime.datetime.now().timestamp()
- formatted_time = datetime.datetime.fromtimestamp(timestamp).strftime('%a %b %d %H:%M:%S %Y')
-
- num_of_equals = (LocalExporterVars.terminal_size - len(formatted_time)) // 2
-
- result = '=' * num_of_equals + formatted_time + '=' * num_of_equals
- print(result)
-
- def __print_bottom_edge(self):
- print('-' * LocalExporterVars.terminal_size)
-
- def print_data(self):
- self.__print_top_edge()
- self.ctable.print_tables()
- print("\n")
- self.htable.print_tables()
- print("\n")
- self.hlltable.print_tables()
- self.__print_bottom_edge()
-
- @classmethod
- def run_json_data_parser(cls):
- praser = cls()
- praser.parse_data()
- praser.print_data()
-
-class LocalExporter:
- DEFAULT_HIST_BINS = [0.1,0.5,0.8,0.9,0.95,0.99]
- DEFAULT_HIST_FORMAT = "summary"
- DEFAULT_JSON_PATH = "./fieldstat.json"
- DEFAULT_INTERVAL_S = 1
-
- def __init__(self):
- self.is_loop = False
- self.interval_s = self.DEFAULT_INTERVAL_S
-
- def __verify_cmd_args(self, args):
- if args.hist_format not in ["summary", "histogram"]:
- logging.error("When historgram format must be 'summary' or 'histogram'")
- return -1
-
- bins = args.hist_bins.split(',')
-
- if args.hist_format == "summary":
- for bin in bins:
- if not re.match(r'^0\.([1-9]|[0][1-9]|[1-9]\d)$', bin):
- logging.error("When historgram format is %s, bins value in [0.01-0.99], "
- "bins format example: 0.1,0,2,0.3", args.hist_format)
- return -1
- if args.hist_format == "histogram":
- for bin in bins:
- if not re.match(r'^\d+(\.\d+)?$', bin):
- logging.error("When historgram format is %s, bins value is integer or decimal, "
- "bins format example: 0.1,1,10,100,1000", args.hist_format)
- return -1
- return 0
-
- def __parse_bins_str(self, bins_str):
- bins = []
- for item in bins_str.split(','):
- bins.append(float(item))
- return list(set(bins))
-
- def read_cmd_options(self):
- bins_str = ','.join([str(x) for x in self.DEFAULT_HIST_BINS])
-
- parser = argparse.ArgumentParser(description='Fieldstat local exporter')
-
- parser.add_argument("-b", "--hist-bins", type = str, default = bins_str,
- help = "The metrics of histogram type output bins.")
- parser.add_argument("-f", "--hist-format", type = str, default = self.DEFAULT_HIST_FORMAT,
- help = "The metrics of histogram type output format.")
- parser.add_argument("-j", "--json-path", type = str, default = self.DEFAULT_JSON_PATH,
- help = "The input fieldstat metrics json file path.")
- parser.add_argument("-i", "--interval", type = int, default = self.DEFAULT_INTERVAL_S,
- help = "interval, seconds to wait between print.")
-
- parser.add_argument("-l", "--loops", action='store_true', default = False,
- help = "print loop, exit when recv a signal.")
-
- args = parser.parse_args()
-
- if -1 == self.__verify_cmd_args(args):
- parser.print_help()
- sys.exit(1)
-
- LocalExporterVars.hist_format = args.hist_format
- LocalExporterVars.json_path = args.json_path
- LocalExporterVars.hist_bins = self.__parse_bins_str(args.hist_bins)
-
- self.interval_s = args.interval
- self.is_loop = args.loops
-
- def local_export(self):
- try:
- while True:
- LocalExporterVars.terminal_size, _ = shutil.get_terminal_size((128, 64))
- JsonDataParser.run_json_data_parser()
- if not self.is_loop:
- break;
- time.sleep(self.interval_s)
- except KeyboardInterrupt:
- pass
-
- @classmethod
- def run_local_exporter(cls):
- exporter = cls()
- exporter.read_cmd_options()
- exporter.local_export()
-
-
-
-if __name__ == '__main__':
- LocalExporter.run_local_exporter()
diff --git a/src/exporter/prometheus_exporter.py b/src/exporter/prometheus_exporter.py
deleted file mode 100644
index 66a57a8..0000000
--- a/src/exporter/prometheus_exporter.py
+++ /dev/null
@@ -1,245 +0,0 @@
-#!/usr/bin/python3
-
-import argparse
-import sys
-import json
-import re
-import os
-from http.server import HTTPServer, BaseHTTPRequestHandler
-
-import ctypes
-
-libfieldstat = ctypes.CDLL('libfieldstat4.so')
-libfieldstat.histogram_base64_decode.argtypes = [ctypes.c_char_p]
-libfieldstat.histogram_base64_decode.restype = ctypes.c_void_p
-
-libfieldstat.histogram_free.argtypes = [ctypes.c_void_p]
-
-libfieldstat.histogram_value_at_percentile.argtypes = [ctypes.c_void_p, ctypes.c_double]
-libfieldstat.histogram_value_at_percentile.restype = ctypes.c_longlong
-
-libfieldstat.histogram_count_le_value.argtypes = [ctypes.c_void_p, ctypes.c_longlong]
-libfieldstat.histogram_count_le_value.restype = ctypes.c_longlong
-
-libfieldstat.histogram_value_total_count.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_total_count.restype = ctypes.c_longlong
-
-libfieldstat.histogram_value_sum.argtypes = [ctypes.c_void_p]
-libfieldstat.histogram_value_sum.restype = ctypes.c_longlong
-
-libfieldstat.hll_base64_to_count.argtypes = [ctypes.c_char_p]
-libfieldstat.hll_base64_to_count.restype = ctypes.c_double
-
-import logging
-logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')
-
-class PrometheusExporterVars:
- hist_bins = []
- hist_format = ""
- json_path = ""
- uri_path = ""
-
-class BuildPrometheusMetrics:
- def __init__(self):
- self.hist_bins = PrometheusExporterVars.hist_bins
- self.hist_format = PrometheusExporterVars.hist_format
- self.json_path = PrometheusExporterVars.json_path
-
- def __escape_metric_name(self, metric_name):
- #regex: [a-zA-Z_:][a-zA-Z0-9_:]*
- escaped_name = re.sub(r'[^a-zA-Z0-9_:]', '_', metric_name)
- return escaped_name
-
- def __escape_metric_tags(self, json_dict):
- #regex: [a-zA-Z_][a-zA-Z0-9_]*
- escaped_tags = []
- tags_dict = json_dict["tags"]
- tags_dict["app_name"] = json_dict["name"]
-
- for key,value in tags_dict.items():
- escaped_key = re.sub(r'[^a-zA-Z0-9_:]', '_', key)
- value_str = str(value)
- escaped_tags.append(f'{escaped_key}="{value_str}"')
-
- return ','.join(escaped_tags)
-
- def __build_type_counter(self, name, tags, value):
- return self.__escape_metric_name(name) + "{" + tags + "}" + ' ' + str(value) + '\n'
-
-
- def __build_histogram_format(self, name, tags, c_hist):
- metrics = ""
- for i in self.hist_bins:
- value = libfieldstat.histogram_count_le_value(c_hist, int(i))
- metric = name + "_bucket" + "{" + tags + ",le=\"{:.2f}\"".format(i) + "}" + ' ' + str(value) + '\n'
- metrics += metric
-
- return metrics
-
- def __build_summary_format(self, name, tags, c_hist):
- metrics = ""
- for i in self.hist_bins:
- value = libfieldstat.histogram_value_at_percentile(c_hist, float(i * 100))
- metric = name + "{" + tags + ",quantile=\"{:.2f}%\"".format(i * 100) + "}" + ' ' + str(value) + '\n'
- metrics += metric
-
- return metrics
-
- def __build_type_histogram(self, name, tags, value):
- escaped_name = self.__escape_metric_name(name)
- metrics = ""
-
- c_hist = libfieldstat.histogram_base64_decode(value.encode('utf-8'))
-
- if self.hist_format == "summary":
- metrics += self.__build_summary_format(escaped_name, tags, c_hist)
- if self.hist_format == "histogram":
- metrics += self.__build_histogram_format(escaped_name, tags, c_hist)
-
- sum_value = libfieldstat.histogram_value_sum(c_hist)
- metrics += escaped_name + "_sum" + "{" + tags + "}" + ' ' + str(sum_value) + '\n'
-
- cnt_value = libfieldstat.histogram_value_total_count(c_hist)
- metrics += escaped_name + "_count" + "{" + tags + "}" + ' ' + str(cnt_value) + '\n'
-
- libfieldstat.histogram_free(c_hist)
-
- return metrics
-
-
- def __build_type_hll(self, name, tags, value):
- hll_value = libfieldstat.hll_base64_to_count(value.encode('utf-8'))
- return name + "{" + tags + "}" + ' ' + "{:.2f}".format(hll_value) + '\n'
-
- def __build_metrics(self, json_dict):
- metrics = ""
- metric_tags = self.__escape_metric_tags(json_dict)
- for key,value in json_dict["fields"].items():
- if isinstance(value, int):
- metrics += self.__build_type_counter(key, metric_tags, value)
- else:
- is_hll = libfieldstat.is_hll(value.encode('utf-8'))
- if is_hll:
- metrics += self.__build_type_hll(key, metric_tags, value)
- else:
- metrics += self.__build_type_histogram(key, metric_tags, value)
- return metrics
-
- def build_metrics_payload(self):
- payload = ""
-
- if not os.path.exists(self.json_path):
- logging.error("Path: {%s} does not exist", self.json_path)
- return payload
-
- with open(self.json_path) as file:
- json_data = json.load(file)
- for item in json_data:
- payload += self.__build_metrics(item)
-
- return payload
-
- @classmethod
- def run_build_metrics(cls):
- builder = cls()
- return builder.build_metrics_payload()
-
-
-class PrometheusEndpoint(BaseHTTPRequestHandler):
- def __init__(self, request, client_address, server):
- self.desired_path = PrometheusExporterVars.uri_path
- super().__init__(request, client_address, server)
-
- def do_GET(self):
- if self.path == self.desired_path:
- resp = BuildPrometheusMetrics.run_build_metrics()
- self.send_response(200)
- self.send_header('Content-type', 'text/plain; version=0.0.4')
- self.end_headers()
- self.wfile.write(resp.encode())
- else:
- self.send_error(404)
- self.end_headers()
-
-class PrometheusExporter:
- DEFAULT_LISTEN_PORT = 8080
- DEFAULT_HIST_BINS = [0.1,0.5,0.8,0.9,0.95,0.99]
- DEFAULT_HIST_FORMAT = "summary"
- DEFAULT_JSON_PATH = "./fieldstat.json"
- DEFAULT_URI_PATH = "/metrics"
-
- def __init__(self):
- self.listen_port = self.DEFAULT_LISTEN_PORT
-
- def __verify_cmd_args(self, args):
- if args.hist_format not in ["summary", "histogram"]:
- logging.error("When historgram format must be 'summary' or 'histogram'")
- return -1
-
- bins = args.hist_bins.split(',')
-
- if args.hist_format == "summary":
- for bin in bins:
- if not re.match(r'^0\.([1-9]|[0][1-9]|[1-9]\d)$', bin):
- logging.error("When historgram format is %s, bins value in [0.01-0.99], "
- "bins format example: 0.1,0,2,0.3", args.hist_format)
- return -1
- if args.hist_format == "histogram":
- for bin in bins:
- if not re.match(r'^\d+(\.\d+)?$', bin):
- logging.error("When historgram format is %s, bins value is integer or decimal, "
- "bins format example: 0.1,1,10,100,1000", args.hist_format)
- return -1
- return 0
-
- def __parse_bins_str(self, bins_str):
- bins = []
- for item in bins_str.split(','):
- bins.append(float(item))
-
- n_bins = list(set(bins))
- n_bins.sort()
- return bins
-
- def read_cmd_options(self):
- bins_str = ','.join([str(x) for x in self.DEFAULT_HIST_BINS])
-
- parser = argparse.ArgumentParser(description='Fieldstat prometheus exporter.')
-
- parser.add_argument("-p", "--listen-port", type = int, default = self.DEFAULT_LISTEN_PORT,
- help = "Specify the prometheus endpoint port to listen. i.e., 80,8080")
- parser.add_argument("-u", "--uri-path", type = str, default = self.DEFAULT_URI_PATH,
- help = "Specify the prometheus endpoint uri path.")
- parser.add_argument("-b", "--hist-bins", type = str, default = bins_str,
- help = "The metrics of histogram type output bins.")
- parser.add_argument("-f", "--hist-format", type = str, default = self.DEFAULT_HIST_FORMAT,
- help = "The metrics of histogram type output format.")
- parser.add_argument("-j", "--json-path", type = str, default = self.DEFAULT_JSON_PATH,
- help = "The input fieldstat metrics json file path.")
-
- args = parser.parse_args()
-
- if -1 == self.__verify_cmd_args(args):
- parser.print_help()
- sys.exit(1)
-
- self.listen_port = args.listen_port
- PrometheusExporterVars.uri_path = args.uri_path
- PrometheusExporterVars.hist_bins = self.__parse_bins_str(args.hist_bins)
- PrometheusExporterVars.hist_format = args.hist_format
- PrometheusExporterVars.json_path = args.json_path
-
- def enable_prometheus_endpoint(self):
- server_address = ('', self.listen_port)
- httpd = HTTPServer(server_address, PrometheusEndpoint)
- httpd.serve_forever()
-
- @classmethod
- def run_prometheus_endpoints(cls):
- exporter = cls()
- exporter.read_cmd_options()
- exporter.enable_prometheus_endpoint()
-
-
-if __name__ == '__main__':
- PrometheusExporter.run_prometheus_endpoints() \ No newline at end of file
diff --git a/test/test_exporter_python.cpp b/test/test_exporter_python.cpp
index f81ae0b..c8912d7 100644
--- a/test/test_exporter_python.cpp
+++ b/test/test_exporter_python.cpp
@@ -146,7 +146,8 @@ TEST(ExporterLocal, TableBuild)
fieldstat_merge(merger, hll);
fieldstat_merge(merger, hist);
fieldstat_merge(merger, table);
- struct fieldstat_json_exporter *exporter = fieldstat_json_exporter_new(hll);
+ struct fieldstat_json_exporter *exporter = fieldstat_json_exporter_new(merger);
+ fieldstat_json_exporter_enable_delta(exporter);
char *str_json = fieldstat_json_exporter_export(exporter, &current);
printf(str_json);
}
diff --git a/test/test_fieldstat_exporter.py b/test/test_fieldstat_exporter.py
new file mode 100644
index 0000000..2d2cb22
--- /dev/null
+++ b/test/test_fieldstat_exporter.py
@@ -0,0 +1,893 @@
+import unittest
+import sys
+import urllib
+from urllib.request import urlopen
+from threading import Thread
+from http.server import HTTPServer, BaseHTTPRequestHandler
+import argparse
+import random
+import math
+from io import StringIO
+from unittest.mock import patch
+from contextlib import redirect_stdout
+from prettytable import PrettyTable,NONE,HEADER
+
+import os
+current_path = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(current_path + '../src/exporter')
+
+from fieldstat_exporter import FieldstatAPI
+
+from fieldstat_exporter import FieldstatExporterVars
+
+from fieldstat_exporter import FieldstatExporter
+from fieldstat_exporter import PrometheusExporter
+from fieldstat_exporter import PrometheusEndpoint
+
+from fieldstat_exporter import CounterTable
+from fieldstat_exporter import HistogramTable
+from fieldstat_exporter import LocalExporter
+
+
+
+FIELDSTAT_INPUT_JSON_PATH = "/tmp/fieldstat.json"
+
+
+class TestPrometheusExporter(unittest.TestCase):
+ def setUp(self):
+ self.hist_val = "HISTEwAAAGQAAAAAAAAAAwAAAAAAAAABAAAAAAAJJ8A/8AAAAAAAA"\
+ "AEEAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"\
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI"\
+ "CAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
+ self.prom = PrometheusExporter()
+
+
+ def test__escape_metric_name(self):
+ name = "tsg_master_log:(){}/\\%*$-,;"
+ ret = self.prom._PrometheusExporter__escape_metric_name(name)
+ self.assertEqual(ret, "tsg_master_log:____________")
+
+ def test__escape_metric_tags(self):
+ json_obj = {'name': '-',
+ 'tags': {
+ 'send(){}/\\%*$-,;': 'sum',
+ 'policy_id': 1,
+ 'quanlity': 0.5
+ },
+ 'fields': {'T_success_log': 1},
+ 'timestamp': 1694657637836}
+ tags = self.prom._PrometheusExporter__escape_metric_tags(json_obj)
+
+ self.assertEqual(tags, "send____________=\"sum\",policy_id=\"1\",quanlity=\"0.5\",app_name=\"-\"")
+
+ def test__build_type_counter(self):
+ name = "tsg_master_log"
+ tags = "send_log=\"sum\",policy_id=\"1\",app_name=\"-\""
+ value = 100
+
+ metric = self.prom._PrometheusExporter__build_type_counter(name, tags, value)
+ self.assertEqual(metric, "tsg_master_log{send_log=\"sum\",policy_id=\"1\",app_name=\"-\"} 100\n")
+
+
+ def test__build_histogram_format(self):
+ c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(self.hist_val.encode('utf-8'))
+ name = "tsg_master_log"
+ tags = "policy_id=\"1\",app_name=\"-\""
+ self.prom.hist_bins = [10,20,50,80,90,95,99]
+
+ metrics = self.prom._PrometheusExporter__build_histogram_format(name, tags, c_hist)
+
+ desired = "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"10.00\"} 10\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"20.00\"} 20\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"50.00\"} 50\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"80.00\"} 80\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"90.00\"} 90\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"95.00\"} 95\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"99.00\"} 99\n"
+
+ self.assertEqual(metrics, desired)
+
+
+ def test__build_summary_format(self):
+ c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(self.hist_val.encode('utf-8'))
+ name = "tsg_master_log"
+ tags = "policy_id=\"1\",app_name=\"-\""
+ self.prom.hist_bins = [0.1,0.2,0.5,0.8,0.9,0.95,0.99]
+
+ metrics = self.prom._PrometheusExporter__build_summary_format(name, tags, c_hist)
+
+ desired = "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"10.00%\"} 9\n" \
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"20.00%\"} 19\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"50.00%\"} 49\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"80.00%\"} 79\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"90.00%\"} 89\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"95.00%\"} 94\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"99.00%\"} 98\n"
+ self.assertEqual(metrics, desired)
+
+
+ def test__build_type_histogram(self):
+ name = "tsg_master_log"
+ tags = "policy_id=\"1\",app_name=\"-\""
+ value = self.hist_val
+
+ self.prom.hist_bins = [0.1,0.2,0.5,0.8,0.9,0.95,0.99]
+ self.prom.hist_format = "summary"
+
+ metrics = self.prom._PrometheusExporter__build_type_histogram(name, tags, value)
+
+ desired = "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"10.00%\"} 9\n" \
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"20.00%\"} 19\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"50.00%\"} 49\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"80.00%\"} 79\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"90.00%\"} 89\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"95.00%\"} 94\n"\
+ "tsg_master_log{policy_id=\"1\",app_name=\"-\",quantile=\"99.00%\"} 98\n"\
+ "tsg_master_log_sum{policy_id=\"1\",app_name=\"-\"} 4950\n"\
+ "tsg_master_log_count{policy_id=\"1\",app_name=\"-\"} 100\n"
+ self.assertEqual(metrics, desired)
+
+ self.prom.hist_bins = [10,20,50,80,90,95,99]
+ self.prom.hist_format = "histogram"
+
+ metrics = self.prom._PrometheusExporter__build_type_histogram(name, tags, value)
+
+ desired = "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"10.00\"} 10\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"20.00\"} 20\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"50.00\"} 50\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"80.00\"} 80\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"90.00\"} 90\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"95.00\"} 95\n"\
+ "tsg_master_log_bucket{policy_id=\"1\",app_name=\"-\",le=\"99.00\"} 99\n"\
+ "tsg_master_log_sum{policy_id=\"1\",app_name=\"-\"} 4950\n"\
+ "tsg_master_log_count{policy_id=\"1\",app_name=\"-\"} 100\n"
+ self.assertEqual(metrics, desired)
+
+
+ def test__build_type_hll(self):
+ name = "tsg_master_log"
+ tags = "policy_id=\"1\",app_name=\"-\""
+ value = "AQUBDECDAQxAQQUIIEEJCDCFARgQRAUIMIMAAAECAAAAAAAAAA=="
+
+ metric = self.prom._PrometheusExporter__build_type_hll(name, tags, value)
+
+ self.assertEqual(metric, "tsg_master_log{policy_id=\"1\",app_name=\"-\"} 62.61\n")
+
+
+ def test__build_metrics(self):
+ counter_dict = {"name": "-",
+ "tags": {
+ "send_log": "PROXY-EVENT",
+ "policy_id": 1,
+ "quanlity": 0.50
+ },
+ "fields": {
+ "T_success_log": 1,
+ "T_success_log_delta": 1
+ },
+ "timestamp_ms": 100010,
+ "timestamp_ms_delta": 0
+ }
+ metrics = self.prom._PrometheusExporter__build_metrics(counter_dict)
+ self.assertEqual(metrics, "T_success_log{send_log=\"PROXY-EVENT\",policy_id=\"1\",quanlity=\"0.5\",app_name=\"-\"} 1\n")
+
+ hll_dict = {"name": "-",
+ "tags": {
+ "rule_id": 1
+ },
+ "fields": {
+ "external_ip": "AQUBDECDAQxAQQUIIEEJCDCFARgQRAUIMIMAAAECAAAAAAAAAA==",
+ },
+ "timestamp_ms": 100010,
+ "timestamp_ms_delta": 100010
+ }
+ metrics = self.prom._PrometheusExporter__build_metrics(hll_dict)
+ self.assertEqual(metrics, "external_ip{rule_id=\"1\",app_name=\"-\"} 62.61\n")
+
+ hist_dict = {"name": "-",
+ "tags": {
+ "thread_id": 1,
+ "hit_rate": 1.10,
+ "rule_id": 1,
+ "action": "deny"
+ },
+ "fields": {
+ "list_num": "HISTEwAAAGQAAAAAAAAAAwAAAAAAAAABAAAAAAAJJ8A/8AAAAAAAAAEEAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
+ },
+ "timestamp_ms": 100010,
+ "timestamp_ms_delta": 0
+ }
+ self.prom.hist_bins = [0.1,0.2,0.5,0.8,0.9,0.95,0.99]
+ self.prom.hist_format = "summary"
+ metrics = self.prom._PrometheusExporter__build_metrics(hist_dict)
+
+ desired = "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"10.00%\"} 9\n"\
+ "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"20.00%\"} 19\n"\
+ "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"50.00%\"} 49\n"\
+ "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"80.00%\"} 79\n"\
+ "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"90.00%\"} 89\n"\
+ "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"95.00%\"} 94\n"\
+ "list_num{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\",quantile=\"99.00%\"} 98\n"\
+ "list_num_sum{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\"} 4950\n"\
+ "list_num_count{thread_id=\"1\",hit_rate=\"1.1\",rule_id=\"1\",action=\"deny\",app_name=\"-\"} 100\n"
+ self.assertEqual(metrics, desired)
+
+
+ def test_build_metrics_payload(self):
+ self.prom.hist_bins = [0.1,0.2,0.5,0.8,0.9,0.95,0.99]
+ self.prom.hist_format = "summary"
+ self.prom.json_path = "/tmp/invalid_path.json"
+ self.prom.n_lines = 0
+
+ payload = self.prom.build_metrics_payload()
+ self.assertEqual(payload, "")
+ self.prom.json_path = FIELDSTAT_INPUT_JSON_PATH
+ payload = self.prom.build_metrics_payload()
+ lines = payload.split('\n')
+ self.assertEqual(len(lines), 93 + 1)
+ self.assertEqual(self.prom.read_lines_num(), 93)
+
+class TestPrometheusEndpoint(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ FieldstatExporterVars.prom_uri_path = "/metrics"
+ server_address = ('', 40001)
+ cls.httpd = HTTPServer(server_address, PrometheusEndpoint)
+ cls.server_thread = Thread(target=cls.httpd.serve_forever)
+ cls.server_thread.start()
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.httpd.shutdown()
+ cls.httpd.server_close()
+ cls.server_thread.join()
+
+ def test_valid_request(self):
+ response = urlopen('http://localhost:40001/metrics')
+ self.assertEqual(response.getcode(), 200)
+
+ def test_invalid_request(self):
+ try:
+ urlopen('http://localhost:40001/invalid')
+ except urllib.error.HTTPError as e:
+ self.assertEqual(e.code, 404)
+
+
+class TestCounterTable(unittest.TestCase):
+ def setUp(self):
+ self.c_table = CounterTable()
+
+
+ def test_add_field_names(self):
+ self.c_table.field_names = []
+
+ fields_names_0 = {"column0": 0, "column1": 1}
+ self.c_table.add_field_names(fields_names_0)
+ self.assertEqual(self.c_table.field_names, ["", "column0", "column1"])
+
+ fields_names_1 = {"column2": 2, "column3": 3}
+ self.c_table.add_field_names(fields_names_1)
+ self.assertEqual(self.c_table.field_names, ["", "column0", "column1"])
+
+
+ def test_append_field_rows(self):
+ self.c_table.field_names = []
+
+ fields_names_0 = {"column0": 0, "column1": 1}
+ self.c_table.append_field_rows({"name": "0"}, {}, fields_names_0)
+ self.assertEqual(self.c_table.rows[0], ["name_0", 0, 1])
+
+ fields_names_1 = {"column2": 2, "column3": 3}
+ self.c_table.append_field_rows({"name": "1"}, {}, fields_names_1)
+ self.assertEqual(self.c_table.rows[1], ["name_1", 2, 3])
+
+
+ def test_add_table_column(self):
+ head = "policy_hit"
+ tags = "{\"thread_id\": 1,\"action\": \"deny\"}"
+ value = 100
+ speed_s = 1.1
+
+ self.c_table.columns = []
+ self.c_table.add_table_column(tags, head, value, speed_s)
+
+ self.assertEqual(len(self.c_table.columns), 1)
+ self.assertEqual(self.c_table.read_columns_num(), 1)
+ self.assertEqual(self.c_table.columns[-1], ("policy_hit", ["{\"thread_id\": 1,\"action\": \"deny\"}", "100", "1.10"]))
+
+
+ def test__build_one_table(self):
+ columns = [("policy_hit", ["{\"thread_id\": 1,\"action\": \"deny\"}", "100", "1.10"])]
+ table = self.c_table._CounterTable__build_one_table(columns)
+
+ self.assertEqual(len(table.field_names), 2)
+
+
+ def test__build_columns_tables(self):
+ self.c_table.columns = []
+ for i in range(100):
+ head = "h" + str(i)
+ self.c_table.columns.append((head, ["{\"thread_id\": 1,\"action\": \"deny\"}", "100", "1.10"]))
+
+ self.c_table.min_width = 3
+ for _ in range(5):
+ self.c_table.column_size = random.randint(1, 100)
+ self.c_table.tables = []
+ self.c_table._CounterTable__build_columns_tables()
+
+ table_size = self.c_table.column_size //(self.c_table.min_width + self.c_table.COLUMM_PADDING)
+ if 0 == table_size:
+ table_size = 1
+
+ self.assertEqual(len(self.c_table.tables), math.ceil(100/table_size))
+ self.assertEqual(self.c_table.read_tables_num(), math.ceil(100/table_size))
+
+
+ def test__build_rows_tables(self):
+ self.c_table.field_names = ["", "column0", "column1"]
+ self.c_table.rows = [["row0", 1, 1], ["row1", 2, 2]]
+
+ n_tables_before = self.c_table.read_tables_num()
+ self.c_table._CounterTable__build_rows_tables()
+ n_table_after = self.c_table.read_tables_num()
+ self.assertEqual(n_table_after - n_tables_before, 1)
+
+
+ def test_print_tables(self):
+ self.c_table.columns = []
+ for i in range(100):
+ head = "h" + str(i)
+ self.c_table.columns.append((head, ["{\"thread_id\": 1,\"action\": \"deny\"}", "100", "1.10"]))
+
+ self.c_table.min_width = 3
+ for _ in range(5):
+ self.c_table.column_size = random.randint(1, 100)
+ self.c_table.tables = []
+
+ table_size = self.c_table.column_size //(self.c_table.min_width + self.c_table.COLUMM_PADDING)
+ if 0 == table_size:
+ table_size = 1
+
+ output = StringIO()
+ sys.stdout = output
+ self.c_table.print_tables()
+ output_str = output.getvalue()
+ sys.stdout = sys.__stdout__
+
+ self.assertEqual(len(self.c_table.tables), math.ceil(100/table_size))
+ self.assertEqual(self.c_table.read_tables_num(), math.ceil(100/table_size))
+ self.assertEqual(len(output_str.split('\n')), math.ceil(100/table_size) * 4 + 1)
+
+
+
+class TestHistogramTable(unittest.TestCase):
+ def setUp(self):
+ self.h_table = HistogramTable()
+ self.hist_val = "HISTEwAAAGQAAAAAAAAAAwAAAAAAAAABAAAAAAAJJ8A/8AAAAAAAA"\
+ "AEEAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"\
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI"\
+ "CAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
+ self.c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(self.hist_val.encode('utf-8'))
+
+ def test__build_summary_format(self):
+ for _ in range(5):
+ table = PrettyTable()
+ n_bins = random.randint(1, 100)
+ self.h_table.bins = []
+ for i in range(1, n_bins + 1):
+ self.h_table.bins.append(i * 0.01)
+
+ self.h_table._HistogramTable__build_histogram_format(self.c_hist, table)
+ self.assertEqual(len(table.field_names), n_bins)
+
+
+ def test__build_histogram_format(self):
+ for _ in range(5):
+ table = PrettyTable()
+ n_bins = random.randint(1, 100)
+ self.h_table.bins = []
+ for i in range(1, n_bins + 1):
+ self.h_table.bins.append(i)
+
+ self.h_table._HistogramTable__build_histogram_format(self.c_hist, table)
+ self.assertEqual(len(table.field_names), n_bins)
+
+
+ def test_build_table(self):
+ tags = "{\"thread_id\": 1,\"action\": \"deny\"}"
+ key = "policy_hit"
+ value = self.hist_val
+
+ for _ in range(5):
+ n_bins = random.randint(1, 100)
+ self.h_table.bins = []
+ for i in range(1, n_bins + 1):
+ self.h_table.bins.append(i * 0.01)
+
+ self.h_table.build_table(tags, key, value)
+ table = self.h_table.tables[-1]
+ self.assertEqual(len(table.field_names), n_bins + 5)
+
+ for _ in range(5):
+ n_operate = random.randint(1, 100)
+ self.h_table.tables = []
+ for _ in range (1, n_operate + 1):
+ self.h_table.build_table(tags, key, value)
+ self.assertEqual(len(self.h_table.tables), n_operate)
+
+
+ def test_print_tables(self):
+ tags = "{\"thread_id\": 1,\"action\": \"deny\"}"
+ key = "policy_hit"
+ value = self.hist_val
+
+ for _ in range(5):
+ n_operate = random.randint(1, 100)
+ self.h_table.tables = []
+ for _ in range (n_operate):
+ self.h_table.build_table(tags, key, value)
+
+ output = StringIO()
+ sys.stdout = output
+ self.h_table.print_tables()
+ output_str = output.getvalue()
+ sys.stdout = sys.__stdout__
+
+ self.assertEqual(len(self.h_table.tables), n_operate)
+ self.assertEqual(len(output_str.split('\n')), n_operate * 7 + 1)
+
+
+ def tearDown(self):
+ FieldstatAPI.libfieldstat.fieldstat_histogram_free(self.c_hist)
+
+
+
+class TestLocalExporter(unittest.TestCase):
+ def setUp(self):
+ self.local = LocalExporter()
+ self.tags = "{\"thread_id\": 1,\"action\": \"deny\"}"
+ self.key = "policy_hit"
+
+
+ self.counter_json_object = { "name": "-",
+ "tags": {
+ "send_log": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ }
+
+ self.hll_json_object = { "name": "-",
+ "tags": {
+ "rule_id": 1
+ },
+ "fields": {
+ "acc_ip": "AQUFEGDCAhAwhAMMIQQBBBCDBRBggQMEMIcAAADCAAAAAAAAAA=="
+ },
+ "timestamp_ms": 100010,
+ "timestamp_ms_delta": 100010
+ }
+
+
+ self.hist_json_object = { "name": "-",
+ "tags": {
+ "action": "deny"
+ },
+ "fields": {
+ "list_num": "HISTEwAAAGQAAAAAAAAAAwAAAAAAAAABAAAAAAAJJ8A/8AAAAAAA"\
+ "AAEEAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC"\
+ "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC"\
+ "AgICAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
+ },
+ "timestamp_ms": 100010,
+ "timestamp_ms_delta": 0
+ }
+
+
+ def test__set_default_display(self):
+ for i in [True, False]:
+ for j in [True, False]:
+ for k in [True, False]:
+ self.local.display_counter = i
+ self.local.display_hist = j
+ self.local.display_hll = k
+ self.local._LocalExporter__set_default_display()
+ if not (i or j or k):
+ self.assertEqual(self.local.display_counter, True)
+ self.assertEqual(self.local.display_hist, True)
+ self.assertEqual(self.local.display_hll, True)
+ else:
+ self.assertEqual(self.local.display_counter, i)
+ self.assertEqual(self.local.display_hist, j)
+ self.assertEqual(self.local.display_hll, k)
+
+
+ def test__dealwith_counter(self):
+ value = 100
+ speed_s = 1.1
+
+ peradd = len(self.local.ctable.columns)
+ self.local._LocalExporter__dealwith_counter(self.tags, self.key, value, speed_s)
+ postadd = len(self.local.ctable.columns)
+
+ self.assertEqual(postadd - peradd, 1)
+
+
+ def test__dealwith_histogram(self):
+ hist_val = "HISTEwAAAGQAAAAAAAAAAwAAAAAAAAABAAAAAAAJJ8A/8AAAAAAAA"\
+ "AEEAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"\
+ "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI"\
+ "CAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
+
+ peradd = len(self.local.htable.tables)
+ self.local._LocalExporter__dealwith_histogram(self.tags, self.key, hist_val)
+ postadd = len(self.local.htable.tables)
+
+ self.assertEqual(postadd - peradd, 1)
+
+
+ def test__dealwith_hll(self):
+ value = "AQUBDECDAQxAQQUIIEEJCDCFARgQRAUIMIMAAAECAAAAAAAAAA=="
+
+ peradd = len(self.local.hlltable.columns)
+ self.local._LocalExporter__dealwith_hll(self.tags, self.key, value)
+ postadd = len(self.local.hlltable.columns)
+
+ self.assertEqual(postadd - peradd, 1)
+
+
+ def test__parse_json_tags(self):
+ tags = self.local._LocalExporter__parse_json_tags(self.counter_json_object)
+ self.assertEqual(tags, "{\"send_log\": \"sum\", \"app_name\": \"-\"}")
+
+
+ def test__get_counter_speed_value(self):
+ speed_s = self.local._LocalExporter__get_counter_speed_value("T_fail_log", self.counter_json_object["fields"], self.counter_json_object)
+ self.assertEqual(speed_s, 2)
+
+ def test__match_tags(self):
+ self.local.match_tags = {}
+ tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.1}
+ ret = self.local._LocalExporter__match_tags(tags)
+ self.assertEqual(ret, True)
+
+ self.local.match_tags = {"action": "deny"}
+ tags = {}
+ ret = self.local._LocalExporter__match_tags(tags)
+ self.assertEqual(ret, False)
+
+ self.local.match_tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.1}
+ tags = {"action": "deny"}
+ ret = self.local._LocalExporter__match_tags(tags)
+ self.assertEqual(ret, False)
+
+ self.local.match_tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.10}
+ tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.1}
+ ret = self.local._LocalExporter__match_tags(tags)
+ self.assertEqual(ret, True)
+
+ def test__is_counter_table(self):
+ json_data_0 = [{ "name": "-",
+ "tags": {
+ "send_log": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ }]
+ ret = self.local._LocalExporter__is_counter_table(json_data_0)
+ self.assertEqual(ret, False)
+
+ json_data_1 = [{ "name": "-",
+ "tags": {
+ "send_log": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ },
+ { "name": "-",
+ "tags": {
+ "send_log": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ }]
+
+ self.local.match_tags = {"policy": 1}
+ ret = self.local._LocalExporter__is_counter_table(json_data_1)
+ self.assertEqual(ret, True)
+
+ self.local.match_tags = {"send_log": "sum"}
+ ret = self.local._LocalExporter__is_counter_table(json_data_1)
+ self.assertEqual(ret, False)
+
+ json_data_2 = [{ "name": "-",
+ "tags": {
+ "send_log": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ },
+ { "name": "-",
+ "tags": {
+ "send_log_0": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ }]
+ self.local.match_tags = {}
+ ret = self.local._LocalExporter__is_counter_table(json_data_2)
+ self.assertEqual(ret, False)
+
+ json_data_3 = [{ "name": "-",
+ "tags": {
+ "send_log": "sum"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ },
+ { "name": "-",
+ "tags": {
+ "send_log": "firewall"
+ },
+ "fields": {
+ "T_fail_log": 2,
+ "T_fail_log_delta": 2
+ },
+ "timestamp_ms": 1000,
+ "timestamp_ms_delta": 0
+ }]
+ self.local.match_tags = {}
+ ret = self.local._LocalExporter__is_counter_table(json_data_3)
+ self.assertEqual(ret, True)
+
+ def test__parse_json_object(self):
+ #counter json object
+ peradd = len(self.local.ctable.columns)
+ self.local._LocalExporter__parse_json_object(self.counter_json_object)
+ postadd = len(self.local.ctable.columns)
+ self.assertEqual(postadd - peradd, 1)
+ #histogram json object
+ peradd = len(self.local.htable.tables)
+ self.local._LocalExporter__parse_json_object(self.hist_json_object)
+ postadd = len(self.local.htable.tables)
+ self.assertEqual(postadd - peradd, 1)
+ #hll json object
+ peradd = len(self.local.hlltable.columns)
+ self.local._LocalExporter__parse_json_object(self.hll_json_object)
+ postadd = len(self.local.hlltable.columns)
+ self.assertEqual(postadd - peradd, 1)
+
+ def test_parse_data(self):
+ self.local.hlltable = CounterTable()
+ self.local.ctable = CounterTable()
+ self.local.htable = HistogramTable()
+
+ self.local.json_path = "/tmp/noexist.json"
+ self.local.parse_data()
+ self.assertEqual(len(self.local.ctable.columns), 0)
+ self.assertEqual(len(self.local.htable.tables), 0)
+ self.assertEqual(len(self.local.hlltable.columns), 0)
+
+ self.local.json_path = FIELDSTAT_INPUT_JSON_PATH
+ self.local.parse_data()
+ self.assertEqual(len(self.local.ctable.columns), 18)
+ self.assertEqual(len(self.local.htable.tables), 8)
+ self.assertEqual(len(self.local.hlltable.columns), 3)
+
+
+class TestFieldstatExporter(unittest.TestCase):
+ def setUp(self):
+ self.exporter = FieldstatExporter()
+
+
+ def test_build_shared_args_parser(self):
+ parser = self.exporter._FieldstatExporter__build_shared_args_parser()
+
+ self.assertEqual(parser.parse_args([]).hist_bins, "0.1,0.5,0.8,0.9,0.95,0.99")
+ self.assertEqual(parser.parse_args([]).hist_format, "summary")
+ self.assertEqual(parser.parse_args([]).json_path, "./fieldstat.json")
+
+ self.assertEqual(parser.parse_args(["-b", "0.1,0.5,0.8,0.99"]).hist_bins, "0.1,0.5,0.8,0.99")
+ self.assertEqual(parser.parse_args(["-f", "histogram"]).hist_format, "histogram")
+ self.assertEqual(parser.parse_args(["-j", "/tmp/sapp_fs.json"]).json_path, "/tmp/sapp_fs.json")
+
+
+ def test_build_prom_parser(self):
+ parser = argparse.ArgumentParser(description='Fieldstat exporter')
+ shared_args_parser = self.exporter._FieldstatExporter__build_shared_args_parser()
+ subparsers = parser.add_subparsers(dest='command')
+ subparsers.required = True
+ self.exporter._FieldstatExporter__build_prom_parser(subparsers, shared_args_parser)
+
+ args = parser.parse_args(["prometheus"])
+ self.assertEqual(args.listen_port, 8080)
+ self.assertEqual(args.uri_path, "/metrics")
+
+ args = parser.parse_args(["prometheus", "-p", "40000", "-u", "/sapp"])
+ self.assertEqual(args.listen_port, 40000)
+ self.assertEqual(args.uri_path, "/sapp")
+
+
+ def test_build_local_parser(self):
+ parser = argparse.ArgumentParser(description='Fieldstat exporter')
+ shared_args_parser = self.exporter._FieldstatExporter__build_shared_args_parser()
+ subparsers = parser.add_subparsers(dest='command')
+ subparsers.required = True
+ self.exporter._FieldstatExporter__build_local_parser(subparsers, shared_args_parser)
+
+ args = parser.parse_args(["local"])
+ self.assertEqual(args.interval, 1)
+ self.assertEqual(args.loop, False)
+ self.assertEqual(args.clear_screen, False)
+
+ args = parser.parse_args(["local", "--loop", "--clear-screen", "-i", "1000", "--display-hist", "--display-hll", "--display-counter", "--match-tags", "policy:1,rule:1"])
+ self.assertEqual(args.interval, 1000)
+ self.assertEqual(args.loop, True)
+ self.assertEqual(args.clear_screen, True)
+ self.assertEqual(args.display_counter, True)
+ self.assertEqual(args.display_hist, True)
+ self.assertEqual(args.display_hll, True)
+ self.assertEqual(args.match_tags, "policy:1,rule:1")
+
+
+ def test_parse_bins_str(self):
+ bins_str = "0.1,1,10,20,50,80,90,99"
+ bins = self.exporter._FieldstatExporter__parse_bins_str(bins_str)
+ self.assertEqual(bins, [0.1, 1.0, 10.0, 20.0, 50.0, 80.0, 90.0, 99.0])
+
+ def test_parse_tags_str(self):
+ tags_str = "policy:1,rule:intercept"
+ tags = self.exporter._FieldstatExporter__parse_tags_str(tags_str)
+ self.assertEqual(tags, {'policy': 1, 'rule': 'intercept'})
+
+ def test_read_shared_args_value(self):
+ parser = self.exporter._FieldstatExporter__build_shared_args_parser()
+
+ args = parser.parse_args()
+ self.exporter._FieldstatExporter__read_shared_args_value(args)
+
+ self.assertEqual(FieldstatExporterVars.hist_format, "summary")
+ self.assertEqual(FieldstatExporterVars.hist_bins, [0.1, 0.5, 0.8, 0.9, 0.95, 0.99])
+ self.assertEqual(FieldstatExporterVars.json_path, "./fieldstat.json")
+
+ args = parser.parse_args(["-f", "histogram", "-b", "1,2,3,4,5", "-j", FIELDSTAT_INPUT_JSON_PATH])
+ self.exporter._FieldstatExporter__read_shared_args_value(args)
+
+ self.assertEqual(FieldstatExporterVars.hist_format, "histogram")
+ self.assertEqual(FieldstatExporterVars.hist_bins, [1.0, 2.0, 3.0, 4.0, 5.0])
+ self.assertEqual(FieldstatExporterVars.json_path, FIELDSTAT_INPUT_JSON_PATH)
+
+
+ def test_read_private_args_value(self):
+ parser = argparse.ArgumentParser(description='Fieldstat exporter')
+ shared_args_parser = self.exporter._FieldstatExporter__build_shared_args_parser()
+ subparsers = parser.add_subparsers(dest='command')
+ subparsers.required = True
+ self.exporter._FieldstatExporter__build_prom_parser(subparsers, shared_args_parser)
+ self.exporter._FieldstatExporter__build_local_parser(subparsers, shared_args_parser)
+
+ args = parser.parse_args(["prometheus"])
+ self.exporter._FieldstatExporter__read_private_args_value(args)
+
+ self.assertEqual(FieldstatExporterVars.prom_uri_path, "/metrics")
+ self.assertEqual(self.exporter.exporter_mode, "prometheus")
+ self.assertEqual(self.exporter.prom_listen_port, 8080)
+
+ args = parser.parse_args(["prometheus", "-p", "40000", "-u", "/sapp"])
+ self.exporter._FieldstatExporter__read_private_args_value(args)
+
+ self.assertEqual(FieldstatExporterVars.prom_uri_path, "/sapp")
+ self.assertEqual(self.exporter.exporter_mode, "prometheus")
+ self.assertEqual(self.exporter.prom_listen_port, 40000)
+
+ args = parser.parse_args(["local"])
+ self.exporter._FieldstatExporter__read_private_args_value(args)
+
+ self.assertEqual(self.exporter.exporter_mode, "local")
+ self.assertEqual(self.exporter.local_interval_s, 1)
+ self.assertEqual(self.exporter.local_enable_loop, False)
+ self.assertEqual(self.exporter.local_clear_screen, False)
+
+
+ args = parser.parse_args(["local", "-i", "100", "-l", "--clear-screen"])
+ self.exporter._FieldstatExporter__read_private_args_value(args)
+
+ self.assertEqual(self.exporter.exporter_mode, "local")
+ self.assertEqual(self.exporter.local_interval_s, 100)
+ self.assertEqual(self.exporter.local_enable_loop, True)
+ self.assertEqual(self.exporter.local_clear_screen, True)
+
+
+ def test_verify_cmd_args(self):
+ parser = argparse.ArgumentParser(description='Fieldstat exporter')
+ shared_args_parser = self.exporter._FieldstatExporter__build_shared_args_parser()
+ subparsers = parser.add_subparsers(dest='command')
+ subparsers.required = True
+ self.exporter._FieldstatExporter__build_prom_parser(subparsers, shared_args_parser)
+ self.exporter._FieldstatExporter__build_local_parser(subparsers, shared_args_parser)
+
+ args = parser.parse_args(["prometheus", "-f", "test"])
+ ret = self.exporter._FieldstatExporter__verify_cmd_args(args)
+ self.assertEqual(ret, -1)
+
+ args = parser.parse_args(["prometheus", "-b", "001,002,003,004", "-f", "summary"])
+ ret = self.exporter._FieldstatExporter__verify_cmd_args(args)
+ self.assertEqual(ret, -1)
+
+ args = parser.parse_args(["prometheus", "-b", "0.1,0.2,0.3,0.4", "-f", "histogram"])
+ ret = self.exporter._FieldstatExporter__verify_cmd_args(args)
+ self.assertEqual(ret, -1)
+
+ args = parser.parse_args(["prometheus", "-p", "800"])
+ ret = self.exporter._FieldstatExporter__verify_cmd_args(args)
+ self.assertEqual(ret, -1)
+
+ args = parser.parse_args(["prometheus", "-p", "80000"])
+ ret = self.exporter._FieldstatExporter__verify_cmd_args(args)
+ self.assertEqual(ret, -1)
+
+ args = parser.parse_args(["local", "-m", "test"])
+ ret = self.exporter._FieldstatExporter__verify_cmd_args(args)
+ self.assertEqual(ret, -1)
+
+
+ @patch('sys.argv', ['fieldstat.py', 'prometheus'])
+    def test_read_cmd_options_prometheus(self):
+ self.exporter.read_cmd_options()
+
+ self.assertEqual(FieldstatExporterVars.hist_format, "summary")
+ self.assertEqual(FieldstatExporterVars.hist_bins, [0.1, 0.5, 0.8, 0.9, 0.95, 0.99])
+ self.assertEqual(FieldstatExporterVars.json_path, "./fieldstat.json")
+
+ self.assertEqual(FieldstatExporterVars.prom_uri_path, "/metrics")
+ self.assertEqual(self.exporter.prom_listen_port, 8080)
+ self.assertEqual(self.exporter.exporter_mode, "prometheus")
+
+
+ @patch('sys.argv', ['fieldstat.py', 'local'])
+ def test_read_cmd_options_local(self):
+ self.exporter.read_cmd_options()
+ self.assertEqual(FieldstatExporterVars.hist_format, "summary")
+ self.assertEqual(FieldstatExporterVars.hist_bins, [0.1, 0.5, 0.8, 0.9, 0.95, 0.99])
+ self.assertEqual(FieldstatExporterVars.json_path, "./fieldstat.json")
+ self.assertEqual(FieldstatExporterVars.local_display_hist, False)
+ self.assertEqual(FieldstatExporterVars.local_display_hll, False)
+ self.assertEqual(FieldstatExporterVars.local_display_counter, False)
+ self.assertEqual(FieldstatExporterVars.local_match_tags, {})
+
+ self.assertEqual(self.exporter.exporter_mode, "local")
+ self.assertEqual(self.exporter.local_interval_s, 1)
+ self.assertEqual(self.exporter.local_enable_loop, False)
+ self.assertEqual(self.exporter.local_clear_screen, False)
+
+
+if __name__ == '__main__':
+ unittest.main() \ No newline at end of file
diff --git a/test/test_prometheus_exporter.py b/test/test_prometheus_exporter.py
deleted file mode 100644
index 82f6b2e..0000000
--- a/test/test_prometheus_exporter.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import unittest
-import sys
-import urllib
-from urllib.request import urlopen
-from threading import Thread
-from http.server import HTTPServer, BaseHTTPRequestHandler
-
-sys.path.append('../src/exporter')
-from prometheus_exporter import PrometheusExporterVars
-from prometheus_exporter import BuildPrometheusMetrics
-from prometheus_exporter import PrometheusEndpoint
-from prometheus_exporter import PrometheusExporter
-
-class TestBuildPrometheusMetrics(unittest.TestCase):
- def setUp(self):
- self.builder = BuildPrometheusMetrics()
-
- def test_escape_metric_name(self):
- metric_name = "tsg_master_log:(){}/\\%*$-,;"
- escaped = self.builder._BuildPrometheusMetrics__escape_metric_name(metric_name)
- self.assertEqual(escaped, "tsg_master_log:____________")
-
-
- def test_escape_metric_tags(self):
- #regex: [a-zA-Z_][a-zA-Z0-9_]*
- json_dict = {'name': '-', 'tags': {'send_log': 'sum', 'policy_id': 1, 'quanlity': 0.5},
- 'fields': {'T_success_log': 1}, 'timestamp': 1694657637836}
- tags = self.builder._BuildPrometheusMetrics__escape_metric_tags(json_dict)
- self.assertEqual(tags, "{send_log=\"sum\",policy_id=\"1\",quanlity=\"0.5\",app_name=\"-\"}")
-
-
- def test_build_one_metric(self):
- name = "send_log"
- tags = "{app_name=\"firewall\"}"
- value = 100
-
- one_metric = self.builder._BuildPrometheusMetrics__build_one_metric(name, tags, value)
- self.assertEqual(one_metric, "send_log{app_name=\"firewall\"} 100\n")
-
-
- def test_build_metrics_payload(self):
- payload = self.builder.build_metrics_payload()
- self.assertEqual(payload, "")
-
- self.builder.json_path = "/tmp/t.json"
- payload = self.builder.build_metrics_payload()
- self.assertNotEqual(payload, "")
-
-
-class TestPrometheusEndpoint(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- PrometheusExporterVars.uri_path = "/metrics"
- server_address = ('', 40001)
- cls.httpd = HTTPServer(server_address, PrometheusEndpoint)
- cls.server_thread = Thread(target=cls.httpd.serve_forever)
- cls.server_thread.start()
-
- @classmethod
- def tearDownClass(cls):
- cls.httpd.shutdown()
- cls.httpd.server_close()
- cls.server_thread.join()
-
- def test_valid_request(self):
- response = urlopen('http://localhost:40001/metrics')
- print(response.getcode())
- self.assertEqual(response.getcode(), 200)
-
- def test_invalid_request(self):
- try:
- urlopen('http://localhost:40001/invalid')
- except urllib.error.HTTPError as e:
- self.assertEqual(e.code, 404)
-
-
-class TestPrometheusExporter(unittest.TestCase):
- def setUp(self):
- self.exporter = PrometheusExporter()
-
- def test_verify_cmd_args_valid(self):
- result = self.exporter._PrometheusExporter__verify_cmd_args()
- self.assertEqual(result, 0)
-
- def test_verify_cmd_args_invalid(self):
- self.exporter.listen_port = 1000
- result = self.exporter._PrometheusExporter__verify_cmd_args()
- self.assertEqual(result, -1)
-
- def test_read_cmd_options(self):
- self.exporter.read_cmd_options()
- self.assertEqual(self.exporter.listen_port, 8080)
- self.assertEqual(PrometheusExporterVars.hist_format, "summary")
- self.assertEqual(PrometheusExporterVars.hist_bins, "0.1,0.5,0.8,0.9,0.95,0.99")
- self.assertEqual(PrometheusExporterVars.json_path, "./fieldstat.json")
- self.assertEqual(PrometheusExporterVars.uri_path, "/metrics")
-
-
-if __name__ == '__main__':
- unittest.main() \ No newline at end of file