mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer.git
synced 2024-12-24 17:20:36 +00:00
tracer/gsttr-stats: adding some filtering options
Also adding a way to show what is in the file.
This commit is contained in:
parent
2239c8b7ac
commit
b2576972ce
2 changed files with 125 additions and 65 deletions
|
@@ -1,16 +1,27 @@
|
|||
#!/usr/bin/env python3
|
||||
'''
|
||||
How to run:
|
||||
Offline:
|
||||
1) generate some log
|
||||
GST_DEBUG="GST_TRACER:7" GST_TRACERS="stats;rusage;latency" GST_DEBUG_FILE=trace.log <application>
|
||||
|
||||
2) print everything
|
||||
python3 gsttr-stats.py trace.log
|
||||
|
||||
3) print selected entries only
|
||||
python3 gsttr-stats.py -c latency trace.log
|
||||
'''
|
||||
|
||||
import logging
|
||||
from fnmatch import fnmatch
|
||||
|
||||
# TODO:
|
||||
# options
|
||||
# - list what is in the log
|
||||
# - select which values to extract
|
||||
# more options
|
||||
# - live-update interval (for file=='-')
|
||||
#
|
||||
# - for values like timestamps, we only want min/max but no average
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logger = logging.getLogger('gsttr-stats')
|
||||
|
||||
from tracer.analysis_runner import AnalysisRunner
|
||||
from tracer.analyzer import Analyzer
|
||||
|
@@ -28,8 +39,9 @@ _NUMERIC_TYPES = ('int', 'uint', 'gint', 'guint', 'gint64', 'guint64')
|
|||
|
||||
class Stats(Analyzer):
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, classes):
|
||||
super(Stats, self).__init__()
|
||||
self.classes = classes
|
||||
self.records = {}
|
||||
self.data = {}
|
||||
|
||||
|
@@ -51,6 +63,8 @@ class Stats(Analyzer):
|
|||
record['scope'][k] = v
|
||||
elif v.name == 'value':
|
||||
# skip non numeric and those without min/max
|
||||
# TODO: skip them only if flags != AGGREGATED
|
||||
# - if flag is AGGREGATED, don't calc average
|
||||
if (v.values['type'] in _NUMERIC_TYPES and
|
||||
'min' in v.values and 'max' in v.values):
|
||||
# TODO only for debugging
|
||||
|
@@ -63,43 +77,89 @@ class Stats(Analyzer):
|
|||
def handle_tracer_entry(self, event):
|
||||
# use first field in message (structure-id) if none
|
||||
if event[Parser.F_FUNCTION]:
|
||||
# TODO: parse params in event[Parser.F_MESSAGE]
|
||||
entry_name = event[Parser.F_FUNCTION]
|
||||
else:
|
||||
return
|
||||
|
||||
try:
|
||||
s = Structure(event[Parser.F_MESSAGE])
|
||||
entry_name = s.name
|
||||
record = self.records.get(entry_name)
|
||||
if record:
|
||||
# aggregate event based on class
|
||||
for sk,sv in record['scope'].items():
|
||||
# look up bin by scope (or create new)
|
||||
key = (_SCOPE_RELATED_TO[sv.values['related-to']] +
|
||||
":" + str(s.values[sk]))
|
||||
scope = self.data.get(key)
|
||||
if not scope:
|
||||
scope = {}
|
||||
self.data[key] = scope
|
||||
for vk,vv in record['value'].items():
|
||||
key = entry_name + "/" + vk
|
||||
data = scope.get(key)
|
||||
if not data:
|
||||
data = {
|
||||
'num': 0,
|
||||
'sum': 0,
|
||||
}
|
||||
if 'max' in vv.values and 'min' in vv.values:
|
||||
data['min'] = int(vv.values['max'])
|
||||
data['max'] = int(vv.values['min'])
|
||||
scope[key] = data
|
||||
# update min/max/sum and count via value
|
||||
dv = int(s.values[vk])
|
||||
# TODO: skip 64bit -1 values ?
|
||||
data['num'] += 1
|
||||
data['sum'] += dv
|
||||
if 'min' in data:
|
||||
data['min'] = min(dv, data['min'])
|
||||
if 'max' in data:
|
||||
data['max'] = max(dv, data['max'])
|
||||
except ValueError:
|
||||
logger.warning("failed to parse: '%s'", event[Parser.F_MESSAGE])
|
||||
return
|
||||
entry_name = s.name
|
||||
|
||||
if self.classes:
|
||||
if not any([fnmatch(entry_name, c) for c in self.classes]):
|
||||
return
|
||||
|
||||
record = self.records.get(entry_name)
|
||||
if not record:
|
||||
return
|
||||
|
||||
# aggregate event based on class
|
||||
for sk,sv in record['scope'].items():
|
||||
# look up bin by scope (or create new)
|
||||
key = (_SCOPE_RELATED_TO[sv.values['related-to']] +
|
||||
":" + str(s.values[sk]))
|
||||
scope = self.data.get(key)
|
||||
if not scope:
|
||||
scope = {}
|
||||
self.data[key] = scope
|
||||
for vk,vv in record['value'].items():
|
||||
# skip optional fields
|
||||
if not vk in s.values:
|
||||
continue
|
||||
|
||||
key = entry_name + "/" + vk
|
||||
data = scope.get(key)
|
||||
if not data:
|
||||
data = {
|
||||
'num': 0,
|
||||
'sum': 0,
|
||||
}
|
||||
if 'max' in vv.values and 'min' in vv.values:
|
||||
data['min'] = int(vv.values['max'])
|
||||
data['max'] = int(vv.values['min'])
|
||||
scope[key] = data
|
||||
# update min/max/sum and count via value
|
||||
dv = int(s.values[vk])
|
||||
data['num'] += 1
|
||||
data['sum'] += dv
|
||||
if 'min' in data:
|
||||
data['min'] = min(dv, data['min'])
|
||||
if 'max' in data:
|
||||
data['max'] = max(dv, data['max'])
|
||||
|
||||
def report(self):
|
||||
# iterate scopes
|
||||
for sk,sv in self.data.items():
|
||||
# iterate tracers
|
||||
for tk,tv in sv.items():
|
||||
mi = tv.get('min', '-')
|
||||
ma = tv.get('max', '-')
|
||||
avg = tv['sum']/tv['num']
|
||||
if is_time_field(tk):
|
||||
if mi != '-':
|
||||
mi = format_ts(mi)
|
||||
if ma != '-':
|
||||
ma = format_ts(ma)
|
||||
avg = format_ts(avg)
|
||||
if mi == ma:
|
||||
print("%-45s: Avg %30s: %s" % (sk, tk, avg))
|
||||
else:
|
||||
print("%-45s: Min/Avg/Max %30s: %s, %s, %s" %
|
||||
(sk, tk, mi, avg, ma))
|
||||
|
||||
class ListClasses(Analyzer):
|
||||
|
||||
def __init__(self):
|
||||
super(ListClasses, self).__init__()
|
||||
|
||||
def handle_tracer_class(self, event):
|
||||
s = Structure(event[Parser.F_MESSAGE])
|
||||
print(s.name)
|
||||
|
||||
def handle_tracer_entry(self, event):
|
||||
raise StopIteration
|
||||
|
||||
|
||||
def format_ts(ts):
|
||||
sec = 1e9
|
||||
|
@@ -117,25 +177,22 @@ if __name__ == '__main__':
|
|||
import argparse
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('file', nargs='?', default='debug.log')
|
||||
parser.add_argument('-c', '--class', action='append', dest='classes',
|
||||
help='tracer class selector (default: all)')
|
||||
parser.add_argument('-l', '--list-classes', action='store_true',
|
||||
help='show tracer classes')
|
||||
args = parser.parse_args()
|
||||
|
||||
analyzer = None
|
||||
if args.list_classes:
|
||||
analyzer = ListClasses()
|
||||
else:
|
||||
analyzer = stats = Stats(args.classes)
|
||||
|
||||
with Parser(args.file) as log:
|
||||
stats = Stats()
|
||||
runner = AnalysisRunner(log)
|
||||
runner.add_analyzer(stats)
|
||||
runner.add_analyzer(analyzer)
|
||||
runner.run()
|
||||
|
||||
# iterate scopes
|
||||
for sk,sv in stats.data.items():
|
||||
# iterate tracers
|
||||
for tk,tv in sv.items():
|
||||
mi = tv.get('min', '-')
|
||||
ma = tv.get('max', '-')
|
||||
avg = tv['sum']/tv['num']
|
||||
if is_time_field(tk):
|
||||
if mi != '-':
|
||||
mi = format_ts(mi)
|
||||
if ma != '-':
|
||||
ma = format_ts(ma)
|
||||
avg = format_ts(avg)
|
||||
print("%-45s: Min/Avg,Max %30s: %s, %s, %s" % (sk, tk, mi, avg, ma))
|
||||
if not args.list_classes:
|
||||
stats.report()
|
||||
|
|
|
@@ -28,11 +28,14 @@ class AnalysisRunner(object):
|
|||
return (not event[Parser.F_LINE] and not event[Parser.F_FILENAME])
|
||||
|
||||
def run(self):
|
||||
for event in self.log:
|
||||
# check if it is a tracer.class or tracer event
|
||||
if self.is_tracer_class(event):
|
||||
self.handle_tracer_class(event)
|
||||
elif self.is_tracer_entry(event):
|
||||
self.handle_tracer_entry(event)
|
||||
#else:
|
||||
# print("unhandled:", repr(event))
|
||||
try:
|
||||
for event in self.log:
|
||||
# check if it is a tracer.class or tracer event
|
||||
if self.is_tracer_class(event):
|
||||
self.handle_tracer_class(event)
|
||||
elif self.is_tracer_entry(event):
|
||||
self.handle_tracer_entry(event)
|
||||
#else:
|
||||
# print("unhandled:", repr(event))
|
||||
except StopIteration:
|
||||
pass
|
||||
|
|
Loading…
Reference in a new issue