# The MIT License (MIT)
#
# Copyright (C) 2015 - Julien Desfossez <jdesfossez@efficios.com>
#               2015 - Philippe Proulx <pproulx@efficios.com>
#               2015 - Antoine Busque <abusque@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import argparse
import json
import os
import re
import sys
import subprocess
from babeltrace import TraceCollection
from . import mi
from .. import _version
from . import progressbar
from .. import __version__
from ..common import version_utils
from ..core import analysis
from ..linuxautomaton import common
from ..linuxautomaton import automaton


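# Command is the abstract base class shared by the lttng-analyses command-line
# front ends.  Concrete subclasses are expected to provide the class
# attributes referenced below (_DESC, _ANALYSIS_CLASS, _MI_TITLE,
# _MI_DESCRIPTION, _MI_TAGS and _MI_TABLE_CLASSES) and to override
# _add_arguments() and _analysis_tick(), and optionally
# _create_summary_result_tables() and _validate_transform_args().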
class Command:
    _MI_BASE_TAGS = ['linux-kernel', 'lttng-analyses']
    _MI_AUTHORS = [
        'Julien Desfossez',
        'Antoine Busque',
        'Philippe Proulx',
    ]
    _MI_URL = 'https://github.com/lttng/lttng-analyses'

    def __init__(self, mi_mode=False):
        self._analysis = None
        self._analysis_conf = None
        self._args = None
        self._handles = None
        self._traces = None
        self._ticks = 0
        self._mi_mode = mi_mode
        self._create_automaton()
        self._mi_setup()

    @property
    def mi_mode(self):
        return self._mi_mode

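    # Top-level entry point: parse the command line, open the trace, run the
    # analysis over every event, then clean up.  A keyboard interrupt (ctrl-C)
    # exits cleanly instead of dumping a traceback.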
    def run(self):
        try:
            self._parse_args()
            self._open_trace()
            self._create_analysis()
            self._run_analysis()
            self._close_trace()
        except KeyboardInterrupt:
            sys.exit(0)

    def _error(self, msg, exit_code=1):
        try:
            import termcolor

            msg = termcolor.colored(msg, 'red', attrs=['bold'])
        except ImportError:
            pass

        print(msg, file=sys.stderr)
        sys.exit(exit_code)

    def _gen_error(self, msg, exit_code=1):
        self._error('Error: {}'.format(msg), exit_code)

    def _cmdline_error(self, msg, exit_code=1):
        self._error('Command line error: {}'.format(msg), exit_code)

    def _print(self, msg):
        if not self._mi_mode:
            print(msg)

    def _mi_create_result_table(self, table_class_name, begin, end,
                                subtitle=None):
        return mi.ResultTable(self._mi_table_classes[table_class_name],
                              begin, end, subtitle)

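    # The _mi_* methods implement the machine interface (MI) mode: results are
    # accumulated as result tables and emitted as JSON instead of being
    # pretty-printed, so that external tools can consume the analyses.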
    def _mi_setup(self):
        self._mi_table_classes = {}

        for tc_tuple in self._MI_TABLE_CLASSES:
            table_class = mi.TableClass(tc_tuple[0], tc_tuple[1], tc_tuple[2])
            self._mi_table_classes[table_class.name] = table_class

        self._mi_clear_result_tables()

    def _mi_print_metadata(self):
        tags = self._MI_BASE_TAGS + self._MI_TAGS
        infos = mi.get_metadata(version=self._MI_VERSION, title=self._MI_TITLE,
                                description=self._MI_DESCRIPTION,
                                authors=self._MI_AUTHORS, url=self._MI_URL,
                                tags=tags,
                                table_classes=self._mi_table_classes.values())
        print(json.dumps(infos))

    def _mi_append_result_table(self, result_table):
        if not result_table or not result_table.rows:
            return

        tc_name = result_table.table_class.name
        self._mi_get_result_tables(tc_name).append(result_table)

    def _mi_append_result_tables(self, result_tables):
        if not result_tables:
            return

        for result_table in result_tables:
            self._mi_append_result_table(result_table)

    def _mi_clear_result_tables(self):
        self._result_tables = {}

    def _mi_get_result_tables(self, table_class_name):
        if table_class_name not in self._result_tables:
            self._result_tables[table_class_name] = []

        return self._result_tables[table_class_name]

    def _mi_print(self):
        results = []

        for result_tables in self._result_tables.values():
            for result_table in result_tables:
                results.append(result_table.to_native_object())

        obj = {
            'results': results,
        }

        print(json.dumps(obj))

    def _create_summary_result_tables(self):
        pass

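    # Trace handling: _open_trace() loads every CTF trace found under the
    # given path with babeltrace, resolves the requested time range, reads the
    # tracer version from the kernel trace metadata and, unless
    # --skip-validation was given, checks for lost events.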
    def _open_trace(self):
        traces = TraceCollection()
        handles = traces.add_traces_recursive(self._args.path, 'ctf')
        if handles == {}:
            self._gen_error('Failed to open ' + self._args.path, -1)
        self._handles = handles
        self._traces = traces
        self._process_date_args()
        self._read_tracer_version()
        if not self._args.skip_validation:
            self._check_lost_events()

    def _close_trace(self):
        for handle in self._handles.values():
            self._traces.remove_trace(handle)

    def _read_tracer_version(self):
        kernel_path = None
        for root, _, _ in os.walk(self._args.path):
            if root.endswith('kernel'):
                kernel_path = root
                break

        if kernel_path is None:
            self._gen_error('Could not find kernel trace directory')

        try:
            metadata = subprocess.getoutput(
                'babeltrace -o ctf-metadata "%s"' % kernel_path)
        except subprocess.CalledProcessError:
            self._gen_error('Cannot run babeltrace on the trace, cannot read'
                            ' tracer version')

        major_match = re.search(r'tracer_major = (\d+)', metadata)
        minor_match = re.search(r'tracer_minor = (\d+)', metadata)
        patch_match = re.search(r'tracer_patchlevel = (\d+)', metadata)

        if not major_match or not minor_match or not patch_match:
            self._gen_error('Malformed metadata, cannot read tracer version')

        self.state.tracer_version = version_utils.Version(
            int(major_match.group(1)),
            int(minor_match.group(1)),
            int(patch_match.group(1)),
        )

    def _check_lost_events(self):
        self._print('Checking the trace for lost events...')
        try:
            subprocess.check_output('babeltrace "%s"' % self._args.path,
                                    shell=True)
        except subprocess.CalledProcessError:
            self._gen_error('Cannot run babeltrace on the trace, cannot verify'
                            ' if events were lost during the trace recording')

    def _pre_analysis(self):
        pass

    def _post_analysis(self):
        if not self._mi_mode:
            return

        if self._ticks > 1:
            self._create_summary_result_tables()

        self._mi_print()

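    # Main event loop: every event is fed both to the analysis and to the
    # kernel state-tracking automaton until the analysis signals that it has
    # ended (for example, when the end of the requested time range is
    # reached).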
    def _run_analysis(self):
        self._pre_analysis()
        progressbar.progressbar_setup(self)

        for event in self._traces.events:
            progressbar.progressbar_update(self)
            self._analysis.process_event(event)
            if self._analysis.ended:
                break
            self._automaton.process_event(event)

        progressbar.progressbar_finish(self)
        self._analysis.end()
        self._post_analysis()

    def _print_date(self, begin_ns, end_ns):
        date = 'Timerange: [%s, %s]' % (
            common.ns_to_hour_nsec(begin_ns, gmt=self._args.gmt,
                                   multi_day=True),
            common.ns_to_hour_nsec(end_ns, gmt=self._args.gmt,
                                   multi_day=True))
        self._print(date)

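    # Compute the (min, max, step) values shared by all frequency
    # distributions when --freq-uniform is in effect.  The values are computed
    # once from the full list of durations (in ns), converted to µs, and
    # cached on the parsed arguments for subsequent calls.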
    def _get_uniform_freq_values(self, durations):
        if self._args.uniform_step is not None:
            return (self._args.uniform_min, self._args.uniform_max,
                    self._args.uniform_step)

        if self._args.min is not None:
            self._args.uniform_min = self._args.min
        else:
            self._args.uniform_min = min(durations)
        if self._args.max is not None:
            self._args.uniform_max = self._args.max
        else:
            self._args.uniform_max = max(durations)

        # ns to µs
        self._args.uniform_min /= 1000
        self._args.uniform_max /= 1000
        self._args.uniform_step = (
            (self._args.uniform_max - self._args.uniform_min) /
            self._args.freq_resolution
        )

        return self._args.uniform_min, self._args.uniform_max, \
            self._args.uniform_step

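    # Validate and normalize the arguments shared by every analysis command:
    # refresh period, period begin/end marker events and keys,
    # CPU/process/TID filters, min/max durations (converted from µs to ns)
    # and the trace path.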
    def _validate_transform_common_args(self, args):
        refresh_period_ns = None
        if args.refresh is not None:
            try:
                refresh_period_ns = common.duration_str_to_ns(args.refresh)
            except ValueError as e:
                self._cmdline_error(str(e))

        self._analysis_conf = analysis.AnalysisConfig()
        self._analysis_conf.refresh_period = refresh_period_ns
        self._analysis_conf.period_begin_ev_name = args.period_begin
        self._analysis_conf.period_end_ev_name = args.period_end
        self._analysis_conf.period_begin_key_fields = \
            args.period_begin_key.split(',')

        if args.period_end_key:
            self._analysis_conf.period_end_key_fields = \
                args.period_end_key.split(',')
        else:
            self._analysis_conf.period_end_key_fields = \
                self._analysis_conf.period_begin_key_fields

        if args.period_key_value:
            self._analysis_conf.period_key_value = \
                tuple(args.period_key_value.split(','))

        if args.cpu:
            self._analysis_conf.cpu_list = args.cpu.split(',')
            self._analysis_conf.cpu_list = [int(cpu) for cpu in
                                            self._analysis_conf.cpu_list]

        # convert min/max args from µs to ns, if needed
        if hasattr(args, 'min') and args.min is not None:
            args.min *= 1000
            self._analysis_conf.min_duration = args.min
        if hasattr(args, 'max') and args.max is not None:
            args.max *= 1000
            self._analysis_conf.max_duration = args.max

        if hasattr(args, 'procname'):
            if args.procname:
                self._analysis_conf.proc_list = args.procname.split(',')

        if hasattr(args, 'tid'):
            if args.tid:
                self._analysis_conf.tid_list = args.tid.split(',')
                self._analysis_conf.tid_list = [int(tid) for tid in
                                                self._analysis_conf.tid_list]

        if hasattr(args, 'freq'):
            args.uniform_min = None
            args.uniform_max = None
            args.uniform_step = None

            if args.freq_series:
                # implies uniform buckets
                args.freq_uniform = True

        if self._mi_mode:
            # force no progress in MI mode
            args.no_progress = True

            # print MI metadata if required
            if args.metadata:
                self._mi_print_metadata()
                sys.exit(0)

        # validate path argument (required at this point)
        if not args.path:
            self._cmdline_error('Please specify a trace path')

        if type(args.path) is list:
            args.path = args.path[0]

    def _validate_transform_args(self, args):
        pass

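    # Build the argparse parser with the options common to all analyses, let
    # the subclass register its own options through _add_arguments(), then
    # validate and store the parsed result.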
    def _parse_args(self):
        ap = argparse.ArgumentParser(description=self._DESC)

        # common arguments
        ap.add_argument('-r', '--refresh', type=str,
                        help='Refresh period, with optional units suffix '
                        '(default units: s)')
        ap.add_argument('--gmt', action='store_true',
                        help='Manipulate timestamps based on GMT instead '
                        'of local time')
        ap.add_argument('--skip-validation', action='store_true',
                        help='Skip the trace validation')
        ap.add_argument('--begin', type=str, help='start time: '
                        'hh:mm:ss[.nnnnnnnnn]')
        ap.add_argument('--end', type=str, help='end time: '
                        'hh:mm:ss[.nnnnnnnnn]')
        ap.add_argument('--period-begin', type=str,
                        help='Analysis period start marker event name')
        ap.add_argument('--period-end', type=str,
                        help='Analysis period end marker event name '
                        '(requires --period-begin)')
        ap.add_argument('--period-begin-key', type=str, default='cpu_id',
                        help='Optional, list of event field names used to '
                        'match period markers (default: cpu_id)')
        ap.add_argument('--period-end-key', type=str,
                        help='Optional, list of event field names used to '
                        'match period markers. If none specified, use the '
                        'same as --period-begin-key')
        ap.add_argument('--period-key-value', type=str,
                        help='Optional, define a fixed key value to which a'
                        ' period must correspond to be considered.')
        ap.add_argument('--cpu', type=str,
                        help='Filter the results only for this list of '
                        'CPU IDs')
        ap.add_argument('--timerange', type=str, help='time range: '
                        '[begin,end]')
        ap.add_argument('-V', '--version', action='version',
                        version='LTTng Analyses v' + __version__)

        # MI mode-dependent arguments
        if self._mi_mode:
            ap.add_argument('--metadata', action='store_true',
                            help='Show analysis\'s metadata')
            ap.add_argument('path', metavar='<path/to/trace>',
                            help='trace path', nargs='*')
        else:
            ap.add_argument('--no-progress', action='store_true',
                            help='Don\'t display the progress bar')
            ap.add_argument('path', metavar='<path/to/trace>',
                            help='trace path')

        # Used to add command-specific args
        self._add_arguments(ap)

        args = ap.parse_args()
        self._validate_transform_common_args(args)
        self._validate_transform_args(args)
        self._args = args

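    # Argument-group helpers that subclasses can call from _add_arguments()
    # to register the common option sets (process/TID filters, min/max
    # durations, --freq, --log, --top and --stats).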
    @staticmethod
    def _add_proc_filter_args(ap):
        ap.add_argument('--procname', type=str,
                        help='Filter the results only for this list of '
                        'process names')
        ap.add_argument('--tid', type=str,
                        help='Filter the results only for this list of TIDs')

    @staticmethod
    def _add_min_max_args(ap):
        ap.add_argument('--min', type=float,
                        help='Filter out durations shorter than min usec')
        ap.add_argument('--max', type=float,
                        help='Filter out durations longer than max usec')

    @staticmethod
    def _add_freq_args(ap, help=None):
        if not help:
            help = 'Output the frequency distribution'

        ap.add_argument('--freq', action='store_true', help=help)
        ap.add_argument('--freq-resolution', type=int, default=20,
                        help='Frequency distribution resolution '
                        '(default 20)')
        ap.add_argument('--freq-uniform', action='store_true',
                        help='Use a uniform resolution across distributions')
        ap.add_argument('--freq-series', action='store_true',
                        help='Consolidate frequency distribution histogram '
                        'as a single one')

    @staticmethod
    def _add_log_args(ap, help=None):
        if not help:
            help = 'Output the events in chronological order'

        ap.add_argument('--log', action='store_true', help=help)

    @staticmethod
    def _add_top_args(ap, help=None):
        if not help:
            help = 'Output the top results'

        ap.add_argument('--limit', type=int, default=10,
                        help='Limit to top X (default = 10)')
        ap.add_argument('--top', action='store_true', help=help)

    @staticmethod
    def _add_stats_args(ap, help=None):
        if not help:
            help = 'Output statistics'

        ap.add_argument('--stats', action='store_true', help=help)

    def _add_arguments(self, ap):
        pass

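    # Resolve --begin, --end and --timerange into absolute timestamps (ns
    # since the epoch) and store them on the analysis configuration.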
    def _process_date_args(self):
        def date_to_epoch_nsec(date):
            ts = common.date_to_epoch_nsec(self._handles, date, self._args.gmt)
            if ts is None:
                self._cmdline_error('Invalid date format: "{}"'.format(date))

            return ts

        self._args.multi_day = common.is_multi_day_trace_collection(
            self._handles)
        begin_ts = None
        end_ts = None

        if self._args.timerange:
            begin_ts, end_ts = common.extract_timerange(self._handles,
                                                        self._args.timerange,
                                                        self._args.gmt)
            if None in [begin_ts, end_ts]:
                self._cmdline_error(
                    'Invalid time format: "{}"'.format(self._args.timerange))
        else:
            if self._args.begin:
                begin_ts = date_to_epoch_nsec(self._args.begin)
            if self._args.end:
                end_ts = date_to_epoch_nsec(self._args.end)

                # We have to check if timestamp_begin is None, which
                # it always is in older versions of babeltrace. In
                # that case, the test is simply skipped and an invalid
                # --end value will cause an empty analysis
                if self._traces.timestamp_begin is not None and \
                        end_ts < self._traces.timestamp_begin:
                    self._cmdline_error(
                        '--end timestamp before beginning of trace')

        self._analysis_conf.begin_ts = begin_ts
        self._analysis_conf.end_ts = end_ts

    def _create_analysis(self):
        notification_cbs = {
            analysis.Analysis.TICK_CB: self._analysis_tick_cb
        }

        self._analysis = self._ANALYSIS_CLASS(self.state, self._analysis_conf)
        self._analysis.register_notification_cbs(notification_cbs)

    def _create_automaton(self):
        self._automaton = automaton.Automaton()
        self.state = self._automaton.state

    def _analysis_tick_cb(self, **kwargs):
        begin_ns = kwargs['begin_ns']
        end_ns = kwargs['end_ns']

        self._analysis_tick(begin_ns, end_ns)
        self._ticks += 1

    def _analysis_tick(self, begin_ns, end_ns):
        raise NotImplementedError()


# create MI version
_cmd_version = _version.get_versions()['version']
_version_match = re.match(r'(\d+)\.(\d+)\.(\d+)(.*)', _cmd_version)
Command._MI_VERSION = version_utils.Version(
    int(_version_match.group(1)),
    int(_version_match.group(2)),
    int(_version_match.group(3)),
    _version_match.group(4),
)
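
# A minimal sketch of how a concrete analysis command plugs into Command.
# The names below (ExampleCommand, ExampleAnalysis and the MI metadata values)
# are hypothetical and only illustrate the contract assumed by this module;
# the real subclasses live in the other lttnganalyses.cli modules.
#
#     class ExampleCommand(Command):
#         _DESC = 'Example duration analysis'
#         _ANALYSIS_CLASS = ExampleAnalysis  # an analysis.Analysis subclass
#         _MI_TITLE = 'Example analysis'
#         _MI_DESCRIPTION = 'Example duration analysis'
#         _MI_TAGS = ['example']             # hypothetical tag
#         _MI_TABLE_CLASSES = []             # (name, title, columns) tuples
#
#         def _add_arguments(self, ap):
#             Command._add_min_max_args(ap)
#             Command._add_freq_args(ap)
#
#         def _analysis_tick(self, begin_ns, end_ns):
#             self._print_date(begin_ns, end_ns)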