tracer/gsttr-stats: adding some filtering options

Also adding a way to show what is in the file.
This commit is contained in:
Stefan Sauer 2016-12-16 15:00:04 +01:00
parent 2239c8b7ac
commit b2576972ce
2 changed files with 125 additions and 65 deletions

View file

@@ -1,16 +1,27 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
''' '''
How to run: How to run:
Offline: 1) generate some log
GST_DEBUG="GST_TRACER:7" GST_TRACERS="stats;rusage;latency" GST_DEBUG_FILE=trace.log <application> GST_DEBUG="GST_TRACER:7" GST_TRACERS="stats;rusage;latency" GST_DEBUG_FILE=trace.log <application>
2) print everything
python3 gsttr-stats.py trace.log python3 gsttr-stats.py trace.log
3) print selected entries only
python3 gsttr-stats.py -c latency trace.log
''' '''
import logging
from fnmatch import fnmatch
# TODO: # TODO:
# options # more options
# - list what is in the log
# - select which values to extract
# - live-update interval (for file=='-') # - live-update interval (for file=='-')
#
# - for values like timestamps, we only want min/max but no average
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger('gsttr-stats')
from tracer.analysis_runner import AnalysisRunner from tracer.analysis_runner import AnalysisRunner
from tracer.analyzer import Analyzer from tracer.analyzer import Analyzer
@@ -28,8 +39,9 @@ _NUMERIC_TYPES = ('int', 'uint', 'gint', 'guint', 'gint64', 'guint64')
class Stats(Analyzer): class Stats(Analyzer):
def __init__(self): def __init__(self, classes):
super(Stats, self).__init__() super(Stats, self).__init__()
self.classes = classes
self.records = {} self.records = {}
self.data = {} self.data = {}
@@ -51,6 +63,8 @@ class Stats(Analyzer):
record['scope'][k] = v record['scope'][k] = v
elif v.name == 'value': elif v.name == 'value':
# skip non numeric and those without min/max # skip non numeric and those without min/max
# TODO: skip them only if flags != AGGREGATED
# - if flag is AGGREGATED, don't calc average
if (v.values['type'] in _NUMERIC_TYPES and if (v.values['type'] in _NUMERIC_TYPES and
'min' in v.values and 'max' in v.values): 'min' in v.values and 'max' in v.values):
# TODO only for debugging # TODO only for debugging
@@ -63,43 +77,89 @@ class Stats(Analyzer):
def handle_tracer_entry(self, event): def handle_tracer_entry(self, event):
# use first field in message (structure-id) if none # use first field in message (structure-id) if none
if event[Parser.F_FUNCTION]: if event[Parser.F_FUNCTION]:
# TODO: parse params in event[Parser.F_MESSAGE] return
entry_name = event[Parser.F_FUNCTION]
else: try:
s = Structure(event[Parser.F_MESSAGE]) s = Structure(event[Parser.F_MESSAGE])
entry_name = s.name except ValueError:
record = self.records.get(entry_name) logger.warning("failed to parse: '%s'", event[Parser.F_MESSAGE])
if record: return
# aggregate event based on class entry_name = s.name
for sk,sv in record['scope'].items():
# look up bin by scope (or create new) if self.classes:
key = (_SCOPE_RELATED_TO[sv.values['related-to']] + if not any([fnmatch(entry_name, c) for c in self.classes]):
":" + str(s.values[sk])) return
scope = self.data.get(key)
if not scope: record = self.records.get(entry_name)
scope = {} if not record:
self.data[key] = scope return
for vk,vv in record['value'].items():
key = entry_name + "/" + vk # aggregate event based on class
data = scope.get(key) for sk,sv in record['scope'].items():
if not data: # look up bin by scope (or create new)
data = { key = (_SCOPE_RELATED_TO[sv.values['related-to']] +
'num': 0, ":" + str(s.values[sk]))
'sum': 0, scope = self.data.get(key)
} if not scope:
if 'max' in vv.values and 'min' in vv.values: scope = {}
data['min'] = int(vv.values['max']) self.data[key] = scope
data['max'] = int(vv.values['min']) for vk,vv in record['value'].items():
scope[key] = data # skip optional fields
# update min/max/sum and count via value if not vk in s.values:
dv = int(s.values[vk]) continue
# TODO: skip 64bit -1 values ?
data['num'] += 1 key = entry_name + "/" + vk
data['sum'] += dv data = scope.get(key)
if 'min' in data: if not data:
data['min'] = min(dv, data['min']) data = {
if 'max' in data: 'num': 0,
data['max'] = max(dv, data['max']) 'sum': 0,
}
if 'max' in vv.values and 'min' in vv.values:
data['min'] = int(vv.values['max'])
data['max'] = int(vv.values['min'])
scope[key] = data
# update min/max/sum and count via value
dv = int(s.values[vk])
data['num'] += 1
data['sum'] += dv
if 'min' in data:
data['min'] = min(dv, data['min'])
if 'max' in data:
data['max'] = max(dv, data['max'])
def report(self):
    """Print an aggregate summary line for every recorded value.

    Iterates the per-scope bins collected in self.data and, for each
    tracer value, prints min/avg/max (collapsed to a single Avg column
    when min and max are equal).  Timestamp-like fields are rendered
    through format_ts for readability.
    """
    # outer loop: one bin per scope (element/pad/... instance)
    for scope_name, tracers in self.data.items():
        # inner loop: one entry per "<entry-name>/<value-name>" key
        for field, stats in tracers.items():
            lo = stats.get('min', '-')
            hi = stats.get('max', '-')
            mean = stats['sum'] / stats['num']
            if is_time_field(field):
                # format only the values that exist; '-' marks "not tracked"
                if lo != '-':
                    lo = format_ts(lo)
                if hi != '-':
                    hi = format_ts(hi)
                mean = format_ts(mean)
            if lo == hi:
                print("%-45s: Avg %30s: %s" % (scope_name, field, mean))
            else:
                print("%-45s: Min/Avg/Max %30s: %s, %s, %s" %
                      (scope_name, field, lo, mean, hi))
class ListClasses(Analyzer):
    """Analyzer that only lists the tracer classes present in a log.

    Tracer-class declarations appear at the top of the log, before any
    data entries; once the first data entry is seen there is nothing
    left to list, so the run is aborted early.
    """

    def __init__(self):
        super(ListClasses, self).__init__()

    def handle_tracer_class(self, event):
        # The structure name in the message identifies the tracer class.
        structure = Structure(event[Parser.F_MESSAGE])
        print(structure.name)

    def handle_tracer_entry(self, event):
        # First data entry reached: stop the AnalysisRunner loop, which
        # catches StopIteration around its event iteration.
        raise StopIteration
def format_ts(ts): def format_ts(ts):
sec = 1e9 sec = 1e9
@@ -117,25 +177,22 @@ if __name__ == '__main__':
import argparse import argparse
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('file', nargs='?', default='debug.log') parser.add_argument('file', nargs='?', default='debug.log')
parser.add_argument('-c', '--class', action='append', dest='classes',
help='tracer class selector (default: all)')
parser.add_argument('-l', '--list-classes', action='store_true',
help='show tracer classes')
args = parser.parse_args() args = parser.parse_args()
analyzer = None
if args.list_classes:
analyzer = ListClasses()
else:
analyzer = stats = Stats(args.classes)
with Parser(args.file) as log: with Parser(args.file) as log:
stats = Stats()
runner = AnalysisRunner(log) runner = AnalysisRunner(log)
runner.add_analyzer(stats) runner.add_analyzer(analyzer)
runner.run() runner.run()
# iterate scopes if not args.list_classes:
for sk,sv in stats.data.items(): stats.report()
# iterate tracers
for tk,tv in sv.items():
mi = tv.get('min', '-')
ma = tv.get('max', '-')
avg = tv['sum']/tv['num']
if is_time_field(tk):
if mi != '-':
mi = format_ts(mi)
if ma != '-':
ma = format_ts(ma)
avg = format_ts(avg)
print("%-45s: Min/Avg,Max %30s: %s, %s, %s" % (sk, tk, mi, avg, ma))

View file

@@ -28,11 +28,14 @@ class AnalysisRunner(object):
return (not event[Parser.F_LINE] and not event[Parser.F_FILENAME]) return (not event[Parser.F_LINE] and not event[Parser.F_FILENAME])
def run(self): def run(self):
for event in self.log: try:
# check if it is a tracer.class or tracer event for event in self.log:
if self.is_tracer_class(event): # check if it is a tracer.class or tracer event
self.handle_tracer_class(event) if self.is_tracer_class(event):
elif self.is_tracer_entry(event): self.handle_tracer_class(event)
self.handle_tracer_entry(event) elif self.is_tracer_entry(event):
#else: self.handle_tracer_entry(event)
# print("unhandled:", repr(event)) #else:
# print("unhandled:", repr(event))
except StopIteration:
pass