author    fumingwei <[email protected]>    2023-11-07 19:06:13 +0800
committer fumingwei <[email protected]>    2023-11-14 18:42:00 +0800
commit    6dca269e662f064cdfccde0afcbc7ef600b11ea6 (patch)
tree      2f61e21c9bf3ee48353a448a7e648839208e7e16
parent    c36286e590e3a169c011a2a15c2eb4e46785865d (diff)
feature:TSG-17564: Adapt to json format changes and add template argument. (tag: v4.4.1)
-rw-r--r--  .gitlab-ci.yml                        2
-rw-r--r--  readme_fieldstat_easy.md             15
-rw-r--r--  src/exporter/fieldstat_exporter.py  537
-rw-r--r--  test/test_fieldstat_exporter.py     380
4 files changed, 513 insertions, 421 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 64e948d..ec630df 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -19,7 +19,7 @@ stages:
- yum install -y elfutils-libelf-devel
- yum install -y libuuid libuuid-devel
- yum install -y zlib
- - python3 -m pip install prettytable
+ - python3 -m pip install prettytable jinja2
.build_by_travis_for_centos7:
diff --git a/readme_fieldstat_easy.md b/readme_fieldstat_easy.md
index ee3af9c..96c88e7 100644
--- a/readme_fieldstat_easy.md
+++ b/readme_fieldstat_easy.md
@@ -115,6 +115,8 @@ optional arguments:
--disable-table disable display table format.
-m MATCH_TAGS, --match-tags MATCH_TAGS
Display the tags match metrics
+ -t TEMPLATE, --template TEMPLATE
+ Specify the print template with jinja2.
```
The local exporter optional arguments default values.
args|default value
@@ -129,6 +131,19 @@ args|default value
--display-hist|False
--display-counter|False
-m, --match-tags|""
+-t, --template|""
+
+## Export table format with the Local Exporter
+### Table format
+Export counter-type metrics as a table of rows and columns.
+### Command line arguments
+The template argument can be used to export metrics in table format. The template provides the following functions:
+functions|description|jinja2 example
+---|---|---
+print_tables(groupby, columns)|groupby specifies the tag key used for the table rows; columns specifies the field keys used as the table columns.| '{{ print_tables("send_log", ["T_success_log"]) }}{{"\n"}}{{ print_tables("groupby", ["T_fail_log"]) }}'
+print_counters(field_keys)| Export counter metrics specified by the field keys.|'{{ print_counters(["T_success_log"]) }}'
+print_histograms(field_keys)| Export histogram metrics specified by the field keys.|'{{ print_histograms(["list_num"]) }}'
+print_hlls(field_keys)| Export hyperloglog metrics specified by the field keys.| '{{ print_hlls(["external_ip"]) }}'
# fieldstat easy
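The readme hunk above documents the new jinja2 template functions; the exporter registers them as jinja2 globals (see export_templates in the Python diff below). For orientation, here is a minimal sketch of how such a template string is evaluated. It is not the exporter's real wiring, and the helper body is a hypothetical stand-in:

```python
# Minimal sketch: register a helper as a jinja2 global and render a template
# string like the ones documented above. The helper is a stand-in only.
from jinja2 import Environment

def print_counters(field_keys):
    return "counters: " + ", ".join(field_keys)

env = Environment()
env.globals.update(print_counters=print_counters)
template = env.from_string('{{ print_counters(["T_success_log"]) }}')
print(template.render())  # -> counters: T_success_log
```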
diff --git a/src/exporter/fieldstat_exporter.py b/src/exporter/fieldstat_exporter.py
index 20a2c5a..cdb7f1d 100644
--- a/src/exporter/fieldstat_exporter.py
+++ b/src/exporter/fieldstat_exporter.py
@@ -10,8 +10,10 @@ import shutil
import datetime
import time
import copy
+import fcntl
from prettytable import PrettyTable,NONE,HEADER
from http.server import HTTPServer, BaseHTTPRequestHandler
+from jinja2 import Environment, FileSystemLoader
import ctypes
@@ -62,7 +64,7 @@ class FieldstatExporterVars:
local_display_hist = False
local_display_hll = False
local_match_tags = {}
- local_disable_table = True
+ local_template = ""
prom_uri_path = ""
@@ -158,8 +160,6 @@ class PrometheusExporter:
for key,value in json_dict["fields"].items():
escaped_name = self.__escape_metric_name(key)
if isinstance(value, int):
- if key.endswith("_delta"):
- continue
metrics += self.__build_type_counter(escaped_name, escaped_tags, value)
else:
is_hll = FieldstatAPI.libfieldstat.fieldstat_is_hll(value.encode('utf-8'))
@@ -177,7 +177,9 @@ class PrometheusExporter:
return payload
with open(self.json_path) as file:
+ fcntl.flock(file, fcntl.LOCK_EX)
json_data = json.load(file)
+ fcntl.flock(file, fcntl.LOCK_UN)
for item in json_data:
payload += self.__build_metrics(item)
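The hunk above wraps the JSON load in an exclusive fcntl lock, presumably so the exporter never parses a partially written file. A standalone sketch of that reader-side pattern, assuming a POSIX system (the path is a placeholder, not the exporter's configured json_path):

```python
# Reader-side flock pattern mirroring the hunk above (fcntl is POSIX-only).
import fcntl
import json

def read_json_locked(path):
    with open(path) as fd:
        fcntl.flock(fd, fcntl.LOCK_EX)   # wait until the writer releases the file
        try:
            data = json.load(fd)
        finally:
            fcntl.flock(fd, fcntl.LOCK_UN)
    return data
```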
@@ -225,47 +227,7 @@ class CounterTable:
self.min_width = self.INFO_COLUMN_WIDTH
self.tables = []
self.columns = []
- self.rows = []
self.field_names = []
- self.rows = []
-
-
- def create_row_table(self, fields):
- field_names = []
- sorted_keys = sorted(fields.keys())
- field_names.append("")
- field_names.extend(sorted_keys)
-
- table = PrettyTable()
- table.vrules = NONE
- table.hrules = NONE
- table.field_names = field_names
-
- for item in field_names:
- table.align[item] = "r"
- table.align[""] = "l"
-
- self.tables.append(table)
-
- return table
-
-
- def add_row_table_row(self, table, tags, fields):
- row = []
-
- if table is None:
- return
-
- #exporter table row name.
- for key, value in tags.items():
- row.append("%s_%s" % (key, str(value)))
-
- #exporter table row value.
- sorted_keys = sorted(fields.keys())
- for key in sorted_keys:
- row.append(fields[key])
-
- table.add_row(row)
def add_table_column(self, tags, head, value, speed_s):
@@ -287,7 +249,7 @@ class CounterTable:
return table
- def __build_columns_tables(self):
+ def build_columns_tables(self):
# One table print in screen size
# One table per screen size
n_columns = len(self.columns)
@@ -297,7 +259,6 @@ class CounterTable:
table_size = self.column_size // (self.min_width + self.COLUMM_PADDING)
if 0 == table_size:
table_size = 1
-
for i in range(0, n_columns, table_size):
table = None
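For reference, a small worked example of the column-chunking arithmetic in the hunk above; the widths are assumed values, not the exporter's INFO_COLUMN_WIDTH / COLUMM_PADDING constants:

```python
# Assumed example values; the exporter derives column_size from the terminal.
column_size = 120      # terminal width in characters
min_width = 16         # minimum column width
column_padding = 3     # spacing between columns
table_size = column_size // (min_width + column_padding)
if table_size == 0:
    table_size = 1
columns = ["h" + str(i) for i in range(10)]
chunks = [columns[i:i + table_size] for i in range(0, len(columns), table_size)]
print(table_size, len(chunks))  # 6 columns per table, 2 tables
```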
@@ -321,56 +282,134 @@ class CounterTable:
return len(self.tables)
def print_tables(self):
- self.__build_columns_tables()
+ self.build_columns_tables()
for item in self.tables:
print(item)
-class HistogramTable:
+class TableFormatTable:
def __init__(self):
- self.format = FieldstatExporterVars.hist_format
- self.bins = FieldstatExporterVars.hist_bins
self.tables = []
+ self.rows = []
- def __build_summary_format(self, c_hist, table):
- for i in self.bins:
- head = "{:.2f}%".format(i * 100)
- row = FieldstatAPI.libfieldstat.fieldstat_histogram_value_at_percentile(c_hist, float(i * 100))
- table.add_column(head, [row])
+ def create_table(self, fields):
+ field_names = []
+ sorted_keys = sorted(fields.keys())
+ field_names.append("")
+ field_names.extend(sorted_keys)
- def __build_histogram_format(self, c_hist, table):
- for i in self.bins:
- head = "le={:d}".format(int(i))
- row = FieldstatAPI.libfieldstat.fieldstat_histogram_value_at_percentile(c_hist, int(i))
- table.add_column(head, [row])
+ table = PrettyTable()
+ table.vrules = NONE
+ table.hrules = NONE
+ table.field_names = field_names
+
+ for item in field_names:
+ table.align[item] = "r"
+ table.align[""] = "l"
+
+ self.tables.append(table)
+
+ return table
+
+
+ def add_table_row(self, table, tags, fields):
+ row = []
+
+ if table is None:
+ return
+
+ # build the table row name from the tags.
+ for key, value in tags.items():
+ row.append("%s_%s" % (key, str(value)))
+
+ # append the row values in sorted field-key order.
+ sorted_keys = sorted(fields.keys())
+ for key in sorted_keys:
+ row.append(fields[key])
+
+ table.add_row(row)
+
+
+ def print_tables(self):
+ for item in self.tables:
+ print(item)
- def build_table(self, tags, key, value):
- table = PrettyTable()
- c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(value.encode('utf-8'))
- if self.format == "summary":
- self.__build_summary_format(c_hist, table)
- if self.format == "histogram":
- self.__build_histogram_format(c_hist, table)
+class HistogramTable:
+ def __init__(self):
+ self.format = FieldstatExporterVars.hist_format
+ self.bins = FieldstatExporterVars.hist_bins
+ self.tables = []
+
+ def __get_row_shared_values(self, c_hist):
+ shared_values = []
max_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_max(c_hist)
- table.add_column("MAX", [max_value])
-
+ shared_values.append(str(max_value))
+
min_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_min(c_hist)
- table.add_column("MIN", [min_value])
-
+ shared_values.append(str(min_value))
+
avg_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_mean(c_hist)
- table.add_column("AVG", ["{:.2f}".format(avg_value)])
-
+ shared_values.append("{:.2f}".format(avg_value))
+
dev_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_stddev(c_hist)
- table.add_column("STDDEV", ["{:.2f}".format(dev_value)])
+ shared_values.append("{:.2f}".format(dev_value))
cnt_value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_total_count(c_hist)
- table.add_column("CNT", [cnt_value])
+ shared_values.append(str(cnt_value))
+
+ return shared_values
+
+ def __get_row_values(self, c_hist):
+ row_values = []
+ for i in self.bins:
+ if self.format == "summary":
+ value = FieldstatAPI.libfieldstat.fieldstat_histogram_value_at_percentile(c_hist, float(i * 100))
+ row_values.append(str(value))
+ if self.format == "histogram":
+ value = FieldstatAPI.libfieldstat.fieldstat_histogram_count_le_value(c_hist, int(i))
+ row_values.append(str(value))
+ shared_values = self.__get_row_shared_values(c_hist)
+ row_values += shared_values
+ return row_values
+
+ def __add_table_field_names(self, table):
+ field_names = []
+ field_names.append("")
+ for i in self.bins:
+ if self.format == "summary":
+ field_names.append("{:.2f}%".format(i * 100))
+ if self.format == "histogram":
+ field_names.append("le={:d}".format(int(i)))
+ field_names += ["MAX", "MIN", "AVG", "STDDEV", "CNT"]
+ table.field_names = field_names
+ for item in field_names:
+ table.min_width[item] = 10
+ if item == "":
+ table.align[item] = "l"
+ else:
+ table.align[item] = "r"
+ def __add_table_row(self, table, hist_value, row_name):
+ if hist_value is None:
+ return
+ c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(hist_value.encode('utf-8'))
+ row = [row_name] + self.__get_row_values(c_hist)
FieldstatAPI.libfieldstat.fieldstat_histogram_free(c_hist)
+ table.add_row(row)
+
+ def build_table(self, tags, key, hist_value, hist_value_delta, interval_ms):
+ table = PrettyTable()
+ table.vrules = NONE
+ # table.hrules = NONE
+ table.title = key + " " + tags + " "
+ if interval_ms > 0:
+ table.title += "interval: {:d}ms".format(int(interval_ms))
+ self.__add_table_field_names(table)
+ self.__add_table_row(table, hist_value, "acc")
+ self.__add_table_row(table, hist_value_delta, "delta")
- table.title = key + " " + tags
self.tables.append(table)
def read_tables_num(self):
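A standalone sketch of the kind of row table that TableFormatTable.create_table and add_table_row build above, using the same prettytable constants the exporter already imports; the tag and field names here are illustrative only:

```python
# Sketch mirroring TableFormatTable: blank leading column for the row name,
# sorted field keys as the remaining columns.
from prettytable import PrettyTable, NONE

table = PrettyTable()
table.vrules = NONE
table.hrules = NONE
table.field_names = ["", "T_fail_log", "T_success_log"]
table.align[""] = "l"
table.align["T_fail_log"] = "r"
table.align["T_success_log"] = "r"
table.add_row(["send_log_sum", 2, 1])  # row name built as "<tag key>_<tag value>"
print(table)
```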
@@ -388,46 +427,44 @@ class LocalExporter:
self.ctable = CounterTable()
self.htable = HistogramTable()
self.hlltable = CounterTable()
+ self.tftable = TableFormatTable()
self.display_counter = FieldstatExporterVars.local_display_counter
self.display_hist = FieldstatExporterVars.local_display_hist
self.display_hll = FieldstatExporterVars.local_display_hll
self.match_tags = FieldstatExporterVars.local_match_tags
- self.disable_table = FieldstatExporterVars.local_disable_table
+ self.template = FieldstatExporterVars.local_template
self.__set_default_display()
+ self.objects_matched = []
+ self.template_ja2 = None
+
def __set_default_display(self):
- #default print all type metrics
if not (self.display_counter or self.display_hist or self.display_hll):
self.display_counter = True
self.display_hist = True
self.display_hll = True
- def __dealwith_counter(self, tags, key, value, speed_s):
+
+ def __build_counter_type_exporter(self, tags, key, value, value_delta, timestamp_ms_delta):
+ speed_s = 0.0
+ if value_delta is not None:
+ speed_s = value_delta * 1000 / timestamp_ms_delta
self.ctable.add_table_column(tags, key, value, speed_s)
- def __dealwith_histogram(self, tags, key, value):
- self.htable.build_table(tags, key, value)
+ def __build_histogram_type_exporter(self, tags, key, value, value_delta, timestamp_ms_delta):
+ self.htable.build_table(tags, key, value, value_delta, timestamp_ms_delta)
- def __dealwith_hll(self, tags, key, value):
+ def __build_hll_type_exporter(self, tags, key, value):
hll_value = FieldstatAPI.libfieldstat.fieldstat_hll_base64_to_count(value.encode('utf-8'))
self.hlltable.add_table_column(tags, key, "{:.2f}".format(hll_value), 0)
- def __parse_json_tags(self, json_object):
+ def __append_app_name_to_tags(self, json_object):
tags_dict = copy.deepcopy(json_object["tags"])
tags_dict.update({"app_name": json_object["name"]})
return json.dumps(tags_dict)
- def __get_counter_speed_value(self, key, fields, json_object):
- delta_key = key + "_delta"
- if delta_key in fields:
- delta_val = fields[delta_key]
- delta_ms = json_object["timestamp_ms"] - json_object["timestamp_ms_delta"]
- speed_s = delta_val * 1000 / delta_ms
- return speed_s
- return 0
-
- def __match_tags(self, tags):
+ def __is_tags_matched(self, tags):
if len(self.match_tags) == 0:
return True
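__build_counter_type_exporter above derives a per-second rate from the per-interval delta. A worked sketch of that arithmetic (the zero-interval guard is an addition in this sketch, not in the exporter's code):

```python
# Mirrors the speed_s calculation: a counter delta over a millisecond window,
# scaled to events per second.
def counter_speed_per_s(value_delta, timestamp_ms_delta):
    if value_delta is None or timestamp_ms_delta <= 0:
        return 0.0
    return value_delta * 1000 / timestamp_ms_delta

print(counter_speed_per_s(2, 1000))  # 2 events in 1 s -> 2.0/s
print(counter_speed_per_s(2, 4000))  # 2 events in 4 s -> 0.5/s
```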
@@ -442,146 +479,250 @@ class LocalExporter:
return False
if value != tags[key]:
return False
-
- return True
-
- def __generate_table_bundle(self, json_objects):
- table_bundle = {}
- table_bundle["not_table_field"] = []
-
- for item in json_objects:
- #set display table option off
- if self.disable_table:
- table_bundle["not_table_field"].append(item)
- continue
- # table: only one tag and same tag key + same field keys + name key
- not_append_table = False
- if len(item["tags"]) != 1:
- table_bundle["not_table_field"].append(item)
- continue
-
- for _, value in item["fields"].items():
- if isinstance(value, str):
- not_append_table = True
- break
-
- if not_append_table == True:
- table_bundle["not_table_field"].append(item)
- continue
+ return True
- key_list = list(item["tags"].keys()) + sorted(list(item["fields"].keys()))
- key = ''.join(key_list) + item["name"]
-
- if key in table_bundle:
- val = table_bundle[key]
- val.append(item)
- else:
- table_bundle[key] = [item]
- return table_bundle
+ def read_json_objects_from_file(self):
+ # check that the source json file exists.
+ objects = []
+ if not os.path.exists(self.json_path):
+ logging.error("Path: {%s} does not exist", self.json_path)
+ return objects
+ with open(self.json_path) as fd:
+ fcntl.flock(fd, fcntl.LOCK_EX)
+ objects = json.load(fd)
+ fcntl.flock(fd, fcntl.LOCK_UN)
+ return objects
- def __read_match_tags_objects(self, json_objects):
- match_objects = []
+ def read_match_tags_json_objects(self, json_objects):
+ matched_objects = []
for item in json_objects:
tags = item["tags"]
#not match tags object. not read.
- if not self.__match_tags(tags):
+ if not self.__is_tags_matched(tags):
continue
#match tags object. delete matching tags.
for key,value in self.match_tags.items():
if key in tags and value == tags[key]:
tags.pop(key, None)
- match_objects.append(item)
+ matched_objects.append(item)
+
+ self.objects_matched = matched_objects
+
+ return matched_objects
+
+ # def build_table_format_htable(self, json_objects):
+ # table_bundle = {}
+ # table_bundle["not_table_format"] = []
+
+ # for item in json_objects:
+ # #set display table option off
+ # if self.disable_table:
+ # table_bundle["not_table_format"].append(item)
+ # continue
+ # # table: only one tag and same tag key + same field keys + name key
+ # not_append_table = False
+ # if len(item["tags"]) != 1:
+ # table_bundle["not_table_format"].append(item)
+ # continue
+
+ # for _, value in item["fields"].items():
+ # if isinstance(value, str):
+ # not_append_table = True
+ # break
- return match_objects
+ # if not_append_table == True:
+ # table_bundle["not_table_format"].append(item)
+ # continue
- def __parse_table_json_object(self, json_objects):
+ # key_list = list(item["tags"].keys()) + sorted(list(item["fields"].keys()))
+ # key = ''.join(key_list) + item["name"]
+
+ # if key in table_bundle:
+ # val = table_bundle[key]
+ # val.append(item)
+ # else:
+ # table_bundle[key] = [item]
+
+ # return table_bundle
+
+
+ def build_not_table_format_exporter(self, json_objects):
+ for item in json_objects:
+ timestamp_ms_delta = item["timestamp_ms_delta"]
+ fields_delta = item["fields_delta"]
+ tags_new = self.__append_app_name_to_tags(item)
+
+ for key,value in item["fields"].items():
+ value_delta = None
+ if key in fields_delta:
+ value_delta = fields_delta[key]
+ if not isinstance(value, str):
+ self.__build_counter_type_exporter(tags_new, key, value, value_delta, timestamp_ms_delta)
+ else:
+ # histogram and hll type
+ is_hll = FieldstatAPI.libfieldstat.fieldstat_is_hll(value.encode('utf-8'))
+ if is_hll:
+ self.__build_hll_type_exporter(tags_new, key, value)
+ else:
+ self.__build_histogram_type_exporter(tags_new, key, value, value_delta, timestamp_ms_delta)
+
+
+ def build_table_format_exporter(self, json_objects):
table = None
for item in json_objects:
tags = item["tags"]
fields = item["fields"]
if table == None:
- table = self.ctable.create_row_table(fields)
- self.ctable.add_row_table_row(table, tags, fields)
+ table = self.tftable.create_table(fields)
+ self.tftable.add_table_row(table, tags, fields)
+ return table
- def __parse_single_json_object(self, json_object):
- tags = self.__parse_json_tags(json_object)
- fields = json_object["fields"]
- for key,value in fields.items():
- if not isinstance(value, str):
- #counter type
- if key.endswith("_delta"):
- continue
- speed_s = self.__get_counter_speed_value(key, fields, json_object)
- self.__dealwith_counter(tags, key, value, speed_s)
- else:
- # histogram and hll type
- is_hll = FieldstatAPI.libfieldstat.fieldstat_is_hll(value.encode('utf-8'))
- if is_hll:
- self.__dealwith_hll(tags, key, value)
- else:
- self.__dealwith_histogram(tags, key, value)
+ def export_templates(self):
+ env = Environment(loader=FileSystemLoader('templates'))
+ env.globals.update(print_tables =self.print_table_format)
+ env.globals.update(print_counters =self.print_counter_type)
+ env.globals.update(print_histograms=self.print_histogram_type)
+ env.globals.update(print_hlls =self.print_hll_type)
+ template = env.from_string(self.template)
+ self.template_ja2 = template
+ def build_local_exporter(self):
+ objects = self.read_json_objects_from_file()
+ self.objects_matched = self.read_match_tags_json_objects(objects)
- def parse_data(self):
- #check source json file is exist.
- if not os.path.exists(self.json_path):
- logging.error("Path: {%s} does not exist", self.json_path)
- return
+ if len(self.template) > 0:
+ self.export_templates()
+ else:
+ self.build_not_table_format_exporter(self.objects_matched)
- with open(self.json_path) as file:
- json_objects = json.load(file)
- #read match tags objects.
- match_objects = self.__read_match_tags_objects(json_objects)
- #generate tables dict.
- table_bundle = self.__generate_table_bundle(match_objects)
-
- for tkey,tval in table_bundle.items():
- if tkey == "not_table_field": # exporter single metrics.
- for item in tval:
- self.__parse_single_json_object(item)
- else: # exporter table-format metrics.
- self.__parse_table_json_object(tval)
-
-
- def __print_top_edge(self):
- timestamp = datetime.datetime.now().timestamp()
- formatted_time = datetime.datetime.fromtimestamp(timestamp).strftime('%a %b %d %H:%M:%S %Y')
-
- num_of_equals = (self.terminal_size - len(formatted_time)) // 2
-
- result = '=' * num_of_equals + formatted_time + '=' * num_of_equals
- print(result)
- def __print_bottom_edge(self):
- print('-' * self.terminal_size)
+ def print_table_format(self, groupby, columns):
+ table_fields=[]
+ for item in self.objects_matched:
+ is_print_table = True
+ tags = item["tags"]
+ fields = item["fields"]
+
+ if groupby not in tags:
+ continue
+ for column in columns:
+ if column not in fields:
+ is_print_table = False
+ break
+ if isinstance(fields[column], str):
+ is_print_table = False
+ continue
+ if is_print_table == False:
+ continue
+
+ new_fields = copy.deepcopy(item)
+
+ for key in fields:
+ if key not in columns:
+ new_fields["fields"].pop(key)
- def print_data(self):
- self.__print_top_edge()
+ for key in tags:
+ if groupby != key:
+ new_fields["tags"].pop(key)
- if self.display_counter:
- self.ctable.print_tables()
+ table_fields.append(new_fields)
+ return self.build_table_format_exporter(table_fields)
+
+# def print_counter(self, [tags], [fields])
+ def print_counter_type(self, field_keys):
+ counter_fields = []
+ for item in self.objects_matched:
+ fields = item["fields"]
- if self.display_hist:
- self.htable.print_tables()
+ new_fields = copy.deepcopy(item)
+
+ for key in fields:
+ if (key not in field_keys) or (isinstance(fields[key], str)):
+ new_fields["fields"].pop(key)
+ counter_fields.append(new_fields)
+ self.build_not_table_format_exporter(counter_fields)
+ self.ctable.build_columns_tables()
+ str_tables = []
+ for item in self.ctable.tables:
+ str_tables.append(item.get_string())
+ return "".join(str_tables)
+
+ def print_histogram_type(self, field_keys):
+ hist_fields = []
+ for item in self.objects_matched:
+ fields = item["fields"]
+ new_fields = copy.deepcopy(item)
+ for key in fields:
+ if (key not in field_keys) \
+ or (not isinstance(fields[key], str)) \
+ or (True == FieldstatAPI.libfieldstat.fieldstat_is_hll(fields[key].encode('utf-8'))):
+ new_fields["fields"].pop(key)
+ hist_fields.append(new_fields)
+ self.build_not_table_format_exporter(hist_fields)
+ str_tables = []
+ for item in self.htable.tables:
+ str_tables.append(item.get_string())
+ return "".join(str_tables)
+
+ def print_hll_type(self, field_keys):
+ hll_fields = []
+ for item in self.objects_matched:
+ fields = item["fields"]
+ new_fields = copy.deepcopy(item)
+ for key in fields:
+ if (key not in field_keys) \
+ or (not isinstance(fields[key], str)) \
+ or (False == FieldstatAPI.libfieldstat.fieldstat_is_hll(fields[key].encode('utf-8'))):
+ new_fields["fields"].pop(key)
+
+ hll_fields.append(new_fields)
+ self.build_not_table_format_exporter(hll_fields)
+ self.hlltable.build_columns_tables()
+ str_tables = []
+ for item in self.hlltable.tables:
+ str_tables.append(item.get_string())
+ return "".join(str_tables)
+
+
+ def print_local_exporter(self):
+ #The top edge
+ timestamp = datetime.datetime.now().timestamp()
+ formatted_time = datetime.datetime.fromtimestamp(timestamp).strftime('%a %b %d %H:%M:%S %Y')
+ num_of_equals = (self.terminal_size - len(formatted_time)) // 2
+ print('=' * num_of_equals + formatted_time + '=' * num_of_equals)
- if self.display_hll:
- self.hlltable.print_tables()
+ if len(self.template) > 0:
+ print(self.template_ja2.render())
+ else:
+ if self.display_counter:
+ self.ctable.print_tables()
+ print("=" * self.terminal_size)
+ self.tftable.print_tables()
+ print("=" * self.terminal_size)
- self.__print_bottom_edge()
+ if self.display_hist:
+ self.htable.print_tables()
+ print("=" * self.terminal_size)
+
+ if self.display_hll:
+ self.hlltable.print_tables()
+ print("=" * self.terminal_size)
+
+ #The bottom edge
+ print('-' * self.terminal_size)
@classmethod
def run_local_exporter(cls):
- praser = cls()
- praser.parse_data()
- praser.print_data()
-
+ exporter = cls()
+ exporter.build_local_exporter()
+ exporter.print_local_exporter()
################################################################################
@@ -635,10 +776,10 @@ class FieldstatExporter:
help = 'Display histogram type metrics')
parser.add_argument('--display-counter', action = 'store_true', default = False,
help = 'Display counter type metrics')
- parser.add_argument('--disable-table', action = 'store_true', default = False,
- help = 'disable display table format')
parser.add_argument("-m", "--match-tags", type = str, default = "",
help = "Display the tags match metrics")
+ parser.add_argument("-t", "--template", type = str, default = "",
+ help = "Specify the print template with jinja2.")
def __parse_bins_str(self, bins_str):
@@ -682,7 +823,7 @@ class FieldstatExporter:
FieldstatExporterVars.local_display_hll = args.display_hll
FieldstatExporterVars.local_display_hist = args.display_hist
FieldstatExporterVars.local_match_tags = self.__parse_tags_str(args.match_tags)
- FieldstatExporterVars.local_disable_table = args.disable_table
+ FieldstatExporterVars.local_template = args.template
self.exporter_mode = 'local'
self.local_interval_s = args.interval
self.local_enable_loop = args.loop
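The new -t/--template argument, like -m/--match-tags, is a plain string; __parse_tags_str (not shown in this diff) turns the match-tags string into the match_tags dict. A hedged sketch of that parsing under the "key:value,key:value" format the tests use — parse_match_tags is a hypothetical helper, and the real implementation may additionally coerce numeric values:

```python
# Hypothetical helper: parse "policy:1,rule:1" into a dict of string values.
def parse_match_tags(tags_str):
    tags = {}
    if not tags_str:
        return tags
    for pair in tags_str.split(","):
        key, _, value = pair.partition(":")
        tags[key] = value
    return tags

print(parse_match_tags("policy:1,rule:1"))  # {'policy': '1', 'rule': '1'}
```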
diff --git a/test/test_fieldstat_exporter.py b/test/test_fieldstat_exporter.py
index 562af4b..84909b7 100644
--- a/test/test_fieldstat_exporter.py
+++ b/test/test_fieldstat_exporter.py
@@ -29,6 +29,7 @@ from fieldstat_exporter import PrometheusEndpoint
from fieldstat_exporter import CounterTable
from fieldstat_exporter import HistogramTable
from fieldstat_exporter import LocalExporter
+from fieldstat_exporter import TableFormatTable
@@ -164,11 +165,13 @@ class TestPrometheusExporter(unittest.TestCase):
"quanlity": 0.50
},
"fields": {
- "T_success_log": 1,
- "T_success_log_delta": 1
+ "T_success_log": 1
+ },
+ "fields_delta": {
+ "T_success_log": 1
},
"timestamp_ms": 100010,
- "timestamp_ms_delta": 0
+ "timestamp_ms_delta": 1000
}
metrics = self.prom._PrometheusExporter__build_metrics(counter_dict)
self.assertEqual(metrics, "T_success_log{send_log=\"PROXY-EVENT\",policy_id=\"1\",quanlity=\"0.5\",app_name=\"-\"} 1\n")
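Taken together, this hunk shows the JSON format change the commit adapts to: per-field deltas move out of fields (the old *_delta keys) into a separate fields_delta object, and timestamp_ms_delta now carries the sampling interval in milliseconds rather than a previous timestamp. A sketch of one metrics object in the new layout, with the values from the test above:

```python
# One metrics object in the new layout ("quanlity" is spelled as in the fixture).
new_format_object = {
    "name": "-",
    "tags": {"send_log": "PROXY-EVENT", "policy_id": 1, "quanlity": 0.50},
    "fields": {"T_success_log": 1},        # accumulated values
    "fields_delta": {"T_success_log": 1},  # per-interval deltas (was fields["T_success_log_delta"])
    "timestamp_ms": 100010,
    "timestamp_ms_delta": 1000,            # now the interval length in ms
}
```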
@@ -259,30 +262,30 @@ class TestCounterTable(unittest.TestCase):
def setUp(self):
self.c_table = CounterTable()
- def test_create_row_table(self):
- fields_names_0 = {"column0": 0, "column1": 1}
- table = self.c_table.create_row_table(fields_names_0)
- self.assertEqual(table.field_names, ["", "column0", "column1"])
+ # def test_create_row_table(self):
+ # fields_names_0 = {"column0": 0, "column1": 1}
+ # table = self.c_table.create_row_table(fields_names_0)
+ # self.assertEqual(table.field_names, ["", "column0", "column1"])
- fields_names_1 = {"column2": 2, "column3": 3}
- table = self.c_table.create_row_table(fields_names_1)
- self.assertEqual(table.field_names, ["", "column2", "column3"])
+ # fields_names_1 = {"column2": 2, "column3": 3}
+ # table = self.c_table.create_row_table(fields_names_1)
+ # self.assertEqual(table.field_names, ["", "column2", "column3"])
- def test_add_row_table_row(self):
- self.c_table.field_names = []
+ # def test_add_row_table_row(self):
+ # self.c_table.field_names = []
- table = self.c_table.create_row_table({"column0": 0, "column1": 1})
- tags = {"row": "0"}
- field = {"column0": 0, "column1":1}
- self.c_table.add_row_table_row(None, tags, field)
+ # table = self.c_table.create_row_table({"column0": 0, "column1": 1})
+ # tags = {"row": "0"}
+ # field = {"column0": 0, "column1":1}
+ # self.c_table.add_row_table_row(None, tags, field)
- table_str = table.get_string()
- row_count = len(table_str.split("\n")) - 1
- self.assertEqual(row_count, 0)
+ # table_str = table.get_string()
+ # row_count = len(table_str.split("\n")) - 1
+ # self.assertEqual(row_count, 0)
- self.c_table.add_row_table_row(table, tags, field)
- row_count = len(table_str.split("\n")) - 1
- self.assertEqual(row_count, 0)
+ # self.c_table.add_row_table_row(table, tags, field)
+ # row_count = len(table_str.split("\n")) - 1
+ # self.assertEqual(row_count, 0)
def test_add_table_column(self):
head = "policy_hit"
@@ -305,7 +308,7 @@ class TestCounterTable(unittest.TestCase):
self.assertEqual(len(table.field_names), 2)
- def test__build_columns_tables(self):
+ def test_build_columns_tables(self):
self.c_table.columns = []
for i in range(100):
head = "h" + str(i)
@@ -315,7 +318,7 @@ class TestCounterTable(unittest.TestCase):
for _ in range(5):
self.c_table.column_size = random.randint(1, 100)
self.c_table.tables = []
- self.c_table._CounterTable__build_columns_tables()
+ self.c_table.build_columns_tables()
table_size = self.c_table.column_size //(self.c_table.min_width + self.c_table.COLUMM_PADDING)
if 0 == table_size:
@@ -352,6 +355,37 @@ class TestCounterTable(unittest.TestCase):
+class TestTableFormatTable(unittest.TestCase):
+ def setUp(self):
+ self.c_table = TableFormatTable()
+
+
+ def test_create_table(self):
+ fields_names_0 = {"column0": 0, "column1": 1}
+ table = self.c_table.create_table(fields_names_0)
+ self.assertEqual(table.field_names, ["", "column0", "column1"])
+
+ fields_names_1 = {"column2": 2, "column3": 3}
+ table = self.c_table.create_table(fields_names_1)
+ self.assertEqual(table.field_names, ["", "column2", "column3"])
+
+ def test_add_table_row(self):
+ self.c_table.field_names = []
+
+ table = self.c_table.create_table({"column0": 0, "column1": 1})
+ tags = {"row": "0"}
+ field = {"column0": 0, "column1":1}
+ self.c_table.add_table_row(None, tags, field)
+
+ table_str = table.get_string()
+ row_count = len(table_str.split("\n")) - 1
+ self.assertEqual(row_count, 0)
+
+ self.c_table.add_table_row(table, tags, field)
+ row_count = len(table_str.split("\n")) - 1
+ self.assertEqual(row_count, 0)
+
+
class TestHistogramTable(unittest.TestCase):
def setUp(self):
self.h_table = HistogramTable()
@@ -361,51 +395,77 @@ class TestHistogramTable(unittest.TestCase):
"CAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
self.c_hist = FieldstatAPI.libfieldstat.fieldstat_histogram_base64_decode(self.hist_val.encode('utf-8'))
- def test__build_summary_format(self):
- for _ in range(5):
- table = PrettyTable()
- n_bins = random.randint(1, 100)
- self.h_table.bins = []
- for i in range(1, n_bins + 1):
- self.h_table.bins.append(i * 0.01)
- self.h_table._HistogramTable__build_histogram_format(self.c_hist, table)
- self.assertEqual(len(table.field_names), n_bins)
+ def test__get_row_shared_values(self):
+ shared_values = self.h_table._HistogramTable__get_row_shared_values(self.c_hist)
+ self.assertEqual(shared_values, ['99', '1', '49.51', '28.85', '100'])
- def test__build_histogram_format(self):
- for _ in range(5):
- table = PrettyTable()
- n_bins = random.randint(1, 100)
- self.h_table.bins = []
- for i in range(1, n_bins + 1):
- self.h_table.bins.append(i)
+ def test__get_row_values(self):
+ self.h_table.bins = [0.1, 0.5, 0.8, 0.9, 0.95, 0.99]
+ self.h_table.format = "summary"
+ row_values = self.h_table._HistogramTable__get_row_values(self.c_hist)
+ self.assertEqual(row_values, ['9', '49', '79', '89', '94', '98', '99', '1', '49.51', '28.85', '100'])
+ self.h_table.bins = [10, 50, 80, 90, 95, 99]
+ self.h_table.format = "histogram"
+ row_values = self.h_table._HistogramTable__get_row_values(self.c_hist)
+ self.assertEqual(row_values, ['10', '50', '80', '90', '95', '99', '99', '1', '49.51', '28.85', '100'])
+
- self.h_table._HistogramTable__build_histogram_format(self.c_hist, table)
- self.assertEqual(len(table.field_names), n_bins)
+ def test__add_table_field_names(self):
+ table_summ = PrettyTable()
+ self.h_table.bins = [0.1, 0.5, 0.8, 0.9, 0.95, 0.99]
+ self.h_table.format = "summary"
+ self.h_table._HistogramTable__add_table_field_names(table_summ)
+ self.assertEqual(table_summ.field_names, ['', '10.00%', '50.00%', '80.00%', '90.00%', '95.00%', '99.00%', 'MAX', 'MIN', 'AVG', 'STDDEV', 'CNT'])
+
+ table_hist = PrettyTable()
+ self.h_table.bins = [10, 50, 80, 90, 95, 99]
+ self.h_table.format = "histogram"
+ self.h_table._HistogramTable__add_table_field_names(table_hist)
+ self.assertEqual(table_hist.field_names, ['', 'le=10', 'le=50', 'le=80', 'le=90', 'le=95', 'le=99', 'MAX', 'MIN', 'AVG', 'STDDEV', 'CNT'])
+
+
+ def test__add_table_row(self):
+ table = PrettyTable()
+ self.h_table.bins = [0.1, 0.5, 0.8, 0.9, 0.95, 0.99]
+ self.h_table.format = "summary"
+ self.h_table._HistogramTable__add_table_field_names(table)
+ self.h_table._HistogramTable__add_table_row(table, self.hist_val, "acc")
+ table_str = table.get_string()
+ row_count = len(table_str.split("\n"))
+ self.assertEqual(row_count, 5)
+ self.h_table._HistogramTable__add_table_row(table, self.hist_val, "delta")
+ table_str = table.get_string()
+ row_count = len(table_str.split("\n"))
+ self.assertEqual(row_count, 6)
+
+
+ def test_build_table_acc_and_delta(self):
+ tags = "{\"thread_id\": 1}"
+ key = "hit_policy"
+ self.h_table.build_table(tags, key, self.hist_val, self.hist_val, 1000)
+ table_str = self.h_table.tables[-1].get_string()
+ row_count = len(table_str.split("\n"))
+ self.assertEqual(row_count, 7)
def test_build_table(self):
tags = "{\"thread_id\": 1,\"action\": \"deny\"}"
key = "policy_hit"
- value = self.hist_val
+ self.h_table.format = "summary"
for _ in range(5):
n_bins = random.randint(1, 100)
self.h_table.bins = []
for i in range(1, n_bins + 1):
self.h_table.bins.append(i * 0.01)
-
- self.h_table.build_table(tags, key, value)
+ self.h_table.build_table(tags, key, self.hist_val, self.hist_val, 1000)
table = self.h_table.tables[-1]
- self.assertEqual(len(table.field_names), n_bins + 5)
-
- for _ in range(5):
- n_operate = random.randint(1, 100)
- self.h_table.tables = []
- for _ in range (1, n_operate + 1):
- self.h_table.build_table(tags, key, value)
- self.assertEqual(len(self.h_table.tables), n_operate)
+ self.assertEqual(len(table.field_names), n_bins + 6)
+ table_str = self.h_table.tables[-1].get_string()
+ row_count = len(table_str.split("\n"))
+ self.assertEqual(row_count, 7)
def test_print_tables(self):
@@ -417,7 +477,7 @@ class TestHistogramTable(unittest.TestCase):
n_operate = random.randint(1, 100)
self.h_table.tables = []
for _ in range (n_operate):
- self.h_table.build_table(tags, key, value)
+ self.h_table.build_table(tags, key, self.hist_val, None, 1000)
output = StringIO()
sys.stdout = output
@@ -426,7 +486,7 @@ class TestHistogramTable(unittest.TestCase):
sys.stdout = sys.__stdout__
self.assertEqual(len(self.h_table.tables), n_operate)
- self.assertEqual(len(output_str.split('\n')), n_operate * 7 + 1)
+ self.assertEqual(len(output_str.split('\n')), n_operate * 6 + 1)
def tearDown(self):
@@ -446,11 +506,13 @@ class TestLocalExporter(unittest.TestCase):
"send_log": "sum"
},
"fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
+ "T_fail_log": 2
+ },
+ "fields_delta": {
+ "T_fail_log": 2
},
"timestamp_ms": 1000,
- "timestamp_ms_delta": 0
+ "timestamp_ms_delta": 1000
}
self.hll_json_object = { "name": "-",
@@ -461,7 +523,7 @@ class TestLocalExporter(unittest.TestCase):
"acc_ip": "AQUFEGDCAhAwhAMMIQQBBBCDBRBggQMEMIcAAADCAAAAAAAAAA=="
},
"timestamp_ms": 100010,
- "timestamp_ms_delta": 100010
+ "timestamp_ms_delta": 0
}
@@ -498,172 +560,76 @@ class TestLocalExporter(unittest.TestCase):
self.assertEqual(self.local.display_hll, k)
- def test__dealwith_counter(self):
- value = 100
- speed_s = 1.1
-
- peradd = len(self.local.ctable.columns)
- self.local._LocalExporter__dealwith_counter(self.tags, self.key, value, speed_s)
- postadd = len(self.local.ctable.columns)
-
- self.assertEqual(postadd - peradd, 1)
+ def test__build_counter_type_exporter(self):
+ for _ in range(5):
+ val = random.randint(1, 100)
+ val_delta = random.randint(10, 20)
+ tsms_delta = random.randint(1, 10) * 1000
+ self.local.ctable.columns = []
+ self.local._LocalExporter__build_counter_type_exporter(self.tags, self.key, val, val_delta, tsms_delta)
+ self.assertEqual(self.local.ctable.columns[-1][1][1], str(val))
+ self.assertEqual(self.local.ctable.columns[-1][1][2], "{:.2f}".format(val_delta*1000/tsms_delta))
- def test__dealwith_histogram(self):
+ def test__build_histogram_type_exporter(self):
hist_val = "HISTEwAAAGQAAAAAAAAAAwAAAAAAAAABAAAAAAAJJ8A/8AAAAAAAA"\
"AEEAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"\
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI"\
"CAgICAgICAgICAgICAgICAgICAgIAAAAAAAAA"
peradd = len(self.local.htable.tables)
- self.local._LocalExporter__dealwith_histogram(self.tags, self.key, hist_val)
+ self.local._LocalExporter__build_histogram_type_exporter(self.tags, self.key, hist_val, None, 0)
postadd = len(self.local.htable.tables)
-
self.assertEqual(postadd - peradd, 1)
- def test__dealwith_hll(self):
+ def test__build_hll_type_exporter(self):
value = "AQUBDECDAQxAQQUIIEEJCDCFARgQRAUIMIMAAAECAAAAAAAAAA=="
peradd = len(self.local.hlltable.columns)
- self.local._LocalExporter__dealwith_hll(self.tags, self.key, value)
+ self.local._LocalExporter__build_hll_type_exporter(self.tags, self.key, value)
postadd = len(self.local.hlltable.columns)
self.assertEqual(postadd - peradd, 1)
- def test__parse_json_tags(self):
- tags = self.local._LocalExporter__parse_json_tags(self.counter_json_object)
+ def test__append_app_name_to_tags(self):
+ tags = self.local._LocalExporter__append_app_name_to_tags(self.counter_json_object)
self.assertEqual(tags, "{\"send_log\": \"sum\", \"app_name\": \"-\"}")
- def test__get_counter_speed_value(self):
- speed_s = self.local._LocalExporter__get_counter_speed_value("T_fail_log", self.counter_json_object["fields"], self.counter_json_object)
- self.assertEqual(speed_s, 2)
-
- def test__match_tags(self):
+ def test__is_tags_matched(self):
self.local.match_tags = {}
tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.1}
- ret = self.local._LocalExporter__match_tags(tags)
+ ret = self.local._LocalExporter__is_tags_matched(tags)
self.assertEqual(ret, True)
self.local.match_tags = {"action": "deny"}
tags = {}
- ret = self.local._LocalExporter__match_tags(tags)
+ ret = self.local._LocalExporter__is_tags_matched(tags)
self.assertEqual(ret, False)
self.local.match_tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.1}
tags = {"action": "deny"}
- ret = self.local._LocalExporter__match_tags(tags)
+ ret = self.local._LocalExporter__is_tags_matched(tags)
self.assertEqual(ret, False)
self.local.match_tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.10}
tags = {"action": "deny", "policy_id": 0, "hit_rate": 1.1}
- ret = self.local._LocalExporter__match_tags(tags)
+ ret = self.local._LocalExporter__is_tags_matched(tags)
self.assertEqual(ret, True)
- def test__generate_table_bundle(self):
- json_data_0 = [{ "name": "-",
- "tags": {
- "send_log": "sum"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- }]
- table_bundle_0 = self.local._LocalExporter__generate_table_bundle(json_data_0)
- self.assertEqual(table_bundle_0["not_table_field"], [])
- self.assertEqual(len(table_bundle_0["send_logT_fail_logT_fail_log_delta-"]), 1)
-
- json_data_1 = [{ "name": "-",
- "tags": {
- "send_log": "sum"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- },
- { "name": "-",
- "tags": {
- "send_log": "sum"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- }]
- table_bundle_1 = self.local._LocalExporter__generate_table_bundle(json_data_1)
- self.assertEqual(table_bundle_1["not_table_field"], [])
- self.assertEqual(len(table_bundle_1["send_logT_fail_logT_fail_log_delta-"]), 2)
-
- json_data_2 = [{ "name": "-",
- "tags": {
- "send_log": "sum"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- },
- { "name": "-",
- "tags": {
- "send_log_0": "sum"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- }]
- table_bundle_2 = self.local._LocalExporter__generate_table_bundle(json_data_2)
- self.assertEqual(table_bundle_2["not_table_field"], [])
- self.assertEqual(len(table_bundle_2["send_logT_fail_logT_fail_log_delta-"]), 1)
- self.assertEqual(len(table_bundle_2["send_log_0T_fail_logT_fail_log_delta-"]), 1)
- json_data_3 = [{ "name": "-",
+ def test_read_match_tags_json_objects(self):
+ json_objects = [{ "name": "-",
"tags": {
"send_log": "sum"
},
"fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- },
- { "name": "-",
- "tags": {
- "send_log": "firewall"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- }]
- table_bundle_3 = self.local._LocalExporter__generate_table_bundle(json_data_3)
- self.assertEqual(table_bundle_3["not_table_field"], [])
- self.assertEqual(len(table_bundle_3["send_logT_fail_logT_fail_log_delta-"]), 2)
-
- json_data_4 = [{ "name": "-",
- "tags": {
- "send_log": "sum"
+ "T_fail_log": 2
},
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
+ "fields_delta": {
+ "T_fail_log": 2
},
"timestamp_ms": 1000,
"timestamp_ms_delta": 0
@@ -673,69 +639,40 @@ class TestLocalExporter(unittest.TestCase):
"send_log": "firewall"
},
"fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- }]
- self.local.disable_table = True
- table_bundle_4 = self.local._LocalExporter__generate_table_bundle(json_data_4)
- self.assertEqual(len(table_bundle_4["not_table_field"]), 2)
-
- def test__read_match_tags_objects(self):
- json_objects = [{ "name": "-",
- "tags": {
- "send_log": "sum"
+ "T_fail_log": 2
},
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
- },
- "timestamp_ms": 1000,
- "timestamp_ms_delta": 0
- },
- { "name": "-",
- "tags": {
- "send_log": "firewall"
- },
- "fields": {
- "T_fail_log": 2,
- "T_fail_log_delta": 2
+ "fields_delta": {
+ "T_fail_log": 2
},
"timestamp_ms": 1000,
"timestamp_ms_delta": 0
}]
self.local.match_tags = {}
- match_object = self.local._LocalExporter__read_match_tags_objects(json_objects)
+ match_object = self.local.read_match_tags_json_objects(json_objects)
self.assertEqual(len(match_object), 2)
self.local.match_tags = {"test": 1}
- match_object = self.local._LocalExporter__read_match_tags_objects(json_objects)
+ match_object = self.local.read_match_tags_json_objects(json_objects)
self.assertEqual(len(match_object), 0)
self.local.match_tags = {"send_log": "firewall"}
- match_object = self.local._LocalExporter__read_match_tags_objects(json_objects)
+ match_object = self.local.read_match_tags_json_objects(json_objects)
self.assertEqual(len(match_object), 1)
- def test_parse_data(self):
+ def test_read_json_objects_from_file(self):
self.local.hlltable = CounterTable()
self.local.ctable = CounterTable()
self.local.htable = HistogramTable()
self.local.json_path = "/tmp/noexist.json"
- self.local.parse_data()
- self.assertEqual(len(self.local.ctable.columns), 0)
- self.assertEqual(len(self.local.htable.tables), 0)
- self.assertEqual(len(self.local.hlltable.columns), 0)
+ objects0 = self.local.read_json_objects_from_file()
+ self.assertEqual(len(objects0), 0)
self.local.json_path = FIELDSTAT_INPUT_JSON_PATH
- self.local.parse_data()
- self.assertEqual(len(self.local.ctable.columns), 18)
- self.assertEqual(len(self.local.htable.tables), 8)
- self.assertEqual(len(self.local.hlltable.columns), 3)
+ objects1 = self.local.read_json_objects_from_file()
+ self.assertGreater(len(objects1), 0)
class TestFieldstatExporter(unittest.TestCase):
@@ -783,14 +720,13 @@ class TestFieldstatExporter(unittest.TestCase):
self.assertEqual(args.loop, False)
self.assertEqual(args.clear_screen, False)
- args = parser.parse_args(["local", "--loop", "--clear-screen", "-i", "1000", "--display-hist", "--display-hll", "--display-counter", "--disable-table", "--match-tags", "policy:1,rule:1"])
+ args = parser.parse_args(["local", "--loop", "--clear-screen", "-i", "1000", "--display-hist", "--display-hll", "--display-counter", "--match-tags", "policy:1,rule:1"])
self.assertEqual(args.interval, 1000)
self.assertEqual(args.loop, True)
self.assertEqual(args.clear_screen, True)
self.assertEqual(args.display_counter, True)
self.assertEqual(args.display_hist, True)
self.assertEqual(args.display_hll, True)
- self.assertEqual(args.disable_table, True)
self.assertEqual(args.match_tags, "policy:1,rule:1")