tracer/gsttr-stats: adding some filtering options

Also adding a way to show what is in the file.
This commit is contained in:
Stefan Sauer 2016-12-16 15:00:04 +01:00
parent 2239c8b7ac
commit b2576972ce
2 changed files with 125 additions and 65 deletions

View file

@@ -1,16 +1,27 @@
#!/usr/bin/env python3
'''
How to run:
Offline:
1) generate some log
GST_DEBUG="GST_TRACER:7" GST_TRACERS="stats;rusage;latency" GST_DEBUG_FILE=trace.log <application>
2) print everything
python3 gsttr-stats.py trace.log
3) print selected entries only
python3 gsttr-stats.py -c latency trace.log
'''
import logging
from fnmatch import fnmatch
# TODO:
# options
# - list what is in the log
# - select which values to extract
# more options
# - live-update interval (for file=='-')
#
# - for values like timestamps, we only want min/max but no average
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger('gsttr-stats')
from tracer.analysis_runner import AnalysisRunner
from tracer.analyzer import Analyzer
@@ -28,8 +39,9 @@ _NUMERIC_TYPES = ('int', 'uint', 'gint', 'guint', 'gint64', 'guint64')
class Stats(Analyzer):
def __init__(self):
def __init__(self, classes):
super(Stats, self).__init__()
self.classes = classes
self.records = {}
self.data = {}
@@ -51,6 +63,8 @@ class Stats(Analyzer):
record['scope'][k] = v
elif v.name == 'value':
# skip non numeric and those without min/max
# TODO: skip them only if flags != AGGREGATED
# - if flag is AGGREGATED, don't calc average
if (v.values['type'] in _NUMERIC_TYPES and
'min' in v.values and 'max' in v.values):
# TODO only for debugging
@@ -63,13 +77,23 @@ class Stats(Analyzer):
def handle_tracer_entry(self, event):
# use first field in message (structure-id) if none
if event[Parser.F_FUNCTION]:
# TODO: parse params in event[Parser.F_MESSAGE]
entry_name = event[Parser.F_FUNCTION]
else:
return
try:
s = Structure(event[Parser.F_MESSAGE])
except ValueError:
logger.warning("failed to parse: '%s'", event[Parser.F_MESSAGE])
return
entry_name = s.name
if self.classes:
if not any([fnmatch(entry_name, c) for c in self.classes]):
return
record = self.records.get(entry_name)
if record:
if not record:
return
# aggregate event based on class
for sk,sv in record['scope'].items():
# look up bin by scope (or create new)
@@ -80,6 +104,10 @@ class Stats(Analyzer):
scope = {}
self.data[key] = scope
for vk,vv in record['value'].items():
# skip optional fields
if not vk in s.values:
continue
key = entry_name + "/" + vk
data = scope.get(key)
if not data:
@@ -93,7 +121,6 @@ class Stats(Analyzer):
scope[key] = data
# update min/max/sum and count via value
dv = int(s.values[vk])
# TODO: skip 64bit -1 values ?
data['num'] += 1
data['sum'] += dv
if 'min' in data:
@@ -101,6 +128,39 @@ class Stats(Analyzer):
if 'max' in data:
data['max'] = max(dv, data['max'])
def report(self):
# iterate scopes
for sk,sv in self.data.items():
# iterate tracers
for tk,tv in sv.items():
mi = tv.get('min', '-')
ma = tv.get('max', '-')
avg = tv['sum']/tv['num']
if is_time_field(tk):
if mi != '-':
mi = format_ts(mi)
if ma != '-':
ma = format_ts(ma)
avg = format_ts(avg)
if mi == ma:
print("%-45s: Avg %30s: %s" % (sk, tk, avg))
else:
print("%-45s: Min/Avg/Max %30s: %s, %s, %s" %
(sk, tk, mi, avg, ma))
class ListClasses(Analyzer):
def __init__(self):
super(ListClasses, self).__init__()
def handle_tracer_class(self, event):
s = Structure(event[Parser.F_MESSAGE])
print(s.name)
def handle_tracer_entry(self, event):
raise StopIteration
def format_ts(ts):
sec = 1e9
h = int(ts // (sec * 60 * 60))
@@ -117,25 +177,22 @@ if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('file', nargs='?', default='debug.log')
parser.add_argument('-c', '--class', action='append', dest='classes',
help='tracer class selector (default: all)')
parser.add_argument('-l', '--list-classes', action='store_true',
help='show tracer classes')
args = parser.parse_args()
analyzer = None
if args.list_classes:
analyzer = ListClasses()
else:
analyzer = stats = Stats(args.classes)
with Parser(args.file) as log:
stats = Stats()
runner = AnalysisRunner(log)
runner.add_analyzer(stats)
runner.add_analyzer(analyzer)
runner.run()
# iterate scopes
for sk,sv in stats.data.items():
# iterate tracers
for tk,tv in sv.items():
mi = tv.get('min', '-')
ma = tv.get('max', '-')
avg = tv['sum']/tv['num']
if is_time_field(tk):
if mi != '-':
mi = format_ts(mi)
if ma != '-':
ma = format_ts(ma)
avg = format_ts(avg)
print("%-45s: Min/Avg,Max %30s: %s, %s, %s" % (sk, tk, mi, avg, ma))
if not args.list_classes:
stats.report()

View file

@@ -28,6 +28,7 @@ class AnalysisRunner(object):
return (not event[Parser.F_LINE] and not event[Parser.F_FILENAME])
def run(self):
try:
for event in self.log:
# check if it is a tracer.class or tracer event
if self.is_tracer_class(event):
@@ -36,3 +37,5 @@ class AnalysisRunner(object):
self.handle_tracer_entry(event)
#else:
# print("unhandled:", repr(event))
except StopIteration:
pass