Diffstat (limited to 'tools/perf/scripts/python')
-rw-r--r--   tools/perf/scripts/python/export-to-postgresql.py   61
-rw-r--r--   tools/perf/scripts/python/export-to-sqlite.py       26
-rwxr-xr-x   tools/perf/scripts/python/exported-sql-viewer.py    42
3 files changed, 89 insertions(+), 40 deletions(-)
diff --git a/tools/perf/scripts/python/export-to-postgresql.py b/tools/perf/scripts/python/export-to-postgresql.py
index 390a351d15ea..c3eae1d77d36 100644
--- a/tools/perf/scripts/python/export-to-postgresql.py
+++ b/tools/perf/scripts/python/export-to-postgresql.py
@@ -10,6 +10,8 @@
 # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 # more details.
 
+from __future__ import print_function
+
 import os
 import sys
 import struct
@@ -199,6 +201,18 @@ import datetime
 
 from PySide.QtSql import *
 
+if sys.version_info < (3, 0):
+	def toserverstr(str):
+		return str
+	def toclientstr(str):
+		return str
+else:
+	# Assume UTF-8 server_encoding and client_encoding
+	def toserverstr(str):
+		return bytes(str, "UTF_8")
+	def toclientstr(str):
+		return bytes(str, "UTF_8")
+
 # Need to access PostgreSQL C library directly to use COPY FROM STDIN
 from ctypes import *
 libpq = CDLL("libpq.so.5")
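
The helpers above choose an encoding strategy once, at import time: on Python 2 a str is already a byte string, while on Python 3 it has to be encoded before it reaches the PGCOPY writer. A minimal sketch of the same idea for one of the helpers (the UTF-8 assumption mirrors the comment in the hunk):

    import sys

    if sys.version_info < (3, 0):
        def toserverstr(s):
            return s                     # Python 2: str is already bytes
    else:
        def toserverstr(s):
            return bytes(s, "UTF_8")     # Python 3: encode for the server

    name = toserverstr("cycles")
    print(len(name))                     # 6: a byte count on either version

Encoding up front matters because the later len() calls feed length fields in the binary COPY records, which must count bytes, not characters.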
@@ -234,12 +248,17 @@ perf_db_export_mode = True
 perf_db_export_calls = False
 perf_db_export_callchains = False
 
+def printerr(*args, **kw_args):
+	print(*args, file=sys.stderr, **kw_args)
+
+def printdate(*args, **kw_args):
+	print(datetime.datetime.today(), *args, sep=' ', **kw_args)
 
 def usage():
-	print >> sys.stderr, "Usage is: export-to-postgresql.py <database name> [<columns>] [<calls>] [<callchains>]"
-	print >> sys.stderr, "where:	columns		'all' or 'branches'"
-	print >> sys.stderr, "		calls		'calls' => create calls and call_paths table"
-	print >> sys.stderr, "		callchains	'callchains' => create call_paths table"
+	printerr("Usage is: export-to-postgresql.py <database name> [<columns>] [<calls>] [<callchains>]")
+	printerr("where:	columns		'all' or 'branches'")
+	printerr("		calls		'calls' => create calls and call_paths table")
+	printerr("		callchains	'callchains' => create call_paths table")
 	raise Exception("Too few arguments")
 
 if (len(sys.argv) < 2):
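
With print imported as a function, the Python 2 stream-redirection form (print >> sys.stderr, ...) no longer behaves as before, so the messages move behind two small wrappers. A self-contained sketch of how they are used (the timestamp shown is illustrative):

    from __future__ import print_function
    import sys
    import datetime

    def printerr(*args, **kw_args):
        print(*args, file=sys.stderr, **kw_args)

    def printdate(*args, **kw_args):
        print(datetime.datetime.today(), *args, sep=' ', **kw_args)

    printerr("Usage is: export-to-postgresql.py <database name> [<columns>] [<calls>] [<callchains>]")
    printdate("Creating database...")   # e.g. "2019-02-28 09:14:31.123456 Creating database..."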
@@ -273,7 +292,7 @@ def do_query(q, s):
 		return
 	raise Exception("Query failed: " + q.lastError().text())
 
-print datetime.datetime.today(), "Creating database..."
+printdate("Creating database...")
 
 db = QSqlDatabase.addDatabase('QPSQL')
 query = QSqlQuery(db)
@@ -506,12 +525,12 @@ do_query(query, 'CREATE VIEW samples_view AS '
 	' FROM samples')
 
 
-file_header = struct.pack("!11sii", "PGCOPY\n\377\r\n\0", 0, 0)
-file_trailer = "\377\377"
+file_header = struct.pack("!11sii", b"PGCOPY\n\377\r\n\0", 0, 0)
+file_trailer = b"\377\377"
 
 def open_output_file(file_name):
 	path_name = output_dir_name + "/" + file_name
-	file = open(path_name, "w+")
+	file = open(path_name, "wb+")
 	file.write(file_header)
 	return file
 
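
The b"..." literals and the switch to "wb+" belong together: on Python 3, writing bytes to a file opened in text mode raises TypeError, while binary mode passes them through unchanged. A stand-alone sketch (the path is made up):

    import struct

    file_header = struct.pack("!11sii", b"PGCOPY\n\377\r\n\0", 0, 0)

    with open("/tmp/example_table.bin", "wb+") as f:   # "w+" would reject bytes on Python 3
        f.write(file_header)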
@@ -526,13 +545,13 @@ def copy_output_file_direct(file, table_name):
 
 # Use COPY FROM STDIN because security may prevent postgres from accessing the files directly
 def copy_output_file(file, table_name):
-	conn = PQconnectdb("dbname = " + dbname)
+	conn = PQconnectdb(toclientstr("dbname = " + dbname))
 	if (PQstatus(conn)):
 		raise Exception("COPY FROM STDIN PQconnectdb failed")
 	file.write(file_trailer)
 	file.seek(0)
 	sql = "COPY " + table_name + " FROM STDIN (FORMAT 'binary')"
-	res = PQexec(conn, sql)
+	res = PQexec(conn, toclientstr(sql))
 	if (PQresultStatus(res) != 4):
 		raise Exception("COPY FROM STDIN PQexec failed")
 	data = file.read(65536)
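
toclientstr() wraps everything handed to libpq through ctypes because a C char * argument only accepts bytes on Python 3; passing a str is rejected with an ArgumentError. A hedged sketch of the same call pattern (the database name is hypothetical, and libpq.so.5 is assumed to be installed):

    import sys
    from ctypes import CDLL

    def toclientstr(s):
        # Python 2 str passes through; Python 3 str must become bytes for char *
        return s if sys.version_info < (3, 0) else bytes(s, "UTF_8")

    libpq = CDLL("libpq.so.5")
    conn = libpq.PQconnectdb(toclientstr("dbname = perf_example"))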
@@ -566,7 +585,7 @@ if perf_db_export_calls:
 	call_file = open_output_file("call_table.bin")
 
 def trace_begin():
-	print datetime.datetime.today(), "Writing to intermediate files..."
+	printdate("Writing to intermediate files...")
 	# id == 0 means unknown. It is easier to create records for them than replace the zeroes with NULLs
 	evsel_table(0, "unknown")
 	machine_table(0, 0, "unknown")
@@ -582,7 +601,7 @@ def trace_begin():
 unhandled_count = 0
 
 def trace_end():
-	print datetime.datetime.today(), "Copying to database..."
+	printdate("Copying to database...")
 	copy_output_file(evsel_file, "selected_events")
 	copy_output_file(machine_file, "machines")
 	copy_output_file(thread_file, "threads")
@@ -597,7 +616,7 @@ def trace_end():
 	if perf_db_export_calls:
 		copy_output_file(call_file, "calls")
 
-	print datetime.datetime.today(), "Removing intermediate files..."
+	printdate("Removing intermediate files...")
 	remove_output_file(evsel_file)
 	remove_output_file(machine_file)
 	remove_output_file(thread_file)
@@ -612,7 +631,7 @@ def trace_end():
 	if perf_db_export_calls:
 		remove_output_file(call_file)
 	os.rmdir(output_dir_name)
-	print datetime.datetime.today(), "Adding primary keys"
+	printdate("Adding primary keys")
 	do_query(query, 'ALTER TABLE selected_events ADD PRIMARY KEY (id)')
 	do_query(query, 'ALTER TABLE machines ADD PRIMARY KEY (id)')
 	do_query(query, 'ALTER TABLE threads ADD PRIMARY KEY (id)')
@@ -627,7 +646,7 @@ def trace_end():
 	if perf_db_export_calls:
 		do_query(query, 'ALTER TABLE calls ADD PRIMARY KEY (id)')
 
-	print datetime.datetime.today(), "Adding foreign keys"
+	printdate("Adding foreign keys")
 	do_query(query, 'ALTER TABLE threads '
 		'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
 		'ADD CONSTRAINT processfk FOREIGN KEY (process_id) REFERENCES threads (id)')
@@ -663,8 +682,8 @@ def trace_end():
 		do_query(query, 'CREATE INDEX pid_idx ON calls (parent_id)')
 
 	if (unhandled_count):
-		print datetime.datetime.today(), "Warning: ", unhandled_count, " unhandled events"
-	print datetime.datetime.today(), "Done"
+		printdate("Warning: ", unhandled_count, " unhandled events")
+	printdate("Done")
 
 def trace_unhandled(event_name, context, event_fields_dict):
 	global unhandled_count
@@ -674,12 +693,14 @@ def sched__sched_switch(*x):
 	pass
 
 def evsel_table(evsel_id, evsel_name, *x):
+	evsel_name = toserverstr(evsel_name)
 	n = len(evsel_name)
 	fmt = "!hiqi" + str(n) + "s"
 	value = struct.pack(fmt, 2, 8, evsel_id, n, evsel_name)
 	evsel_file.write(value)
 
 def machine_table(machine_id, pid, root_dir, *x):
+	root_dir = toserverstr(root_dir)
 	n = len(root_dir)
 	fmt = "!hiqiii" + str(n) + "s"
 	value = struct.pack(fmt, 3, 8, machine_id, 4, pid, n, root_dir)
@@ -690,6 +711,7 @@ def thread_table(thread_id, machine_id, process_id, pid, tid, *x):
 	thread_file.write(value)
 
 def comm_table(comm_id, comm_str, *x):
+	comm_str = toserverstr(comm_str)
 	n = len(comm_str)
 	fmt = "!hiqi" + str(n) + "s"
 	value = struct.pack(fmt, 2, 8, comm_id, n, comm_str)
@@ -701,6 +723,9 @@ def comm_thread_table(comm_thread_id, comm_id, thread_id, *x):
 	comm_thread_file.write(value)
 
 def dso_table(dso_id, machine_id, short_name, long_name, build_id, *x):
+	short_name = toserverstr(short_name)
+	long_name = toserverstr(long_name)
+	build_id = toserverstr(build_id)
 	n1 = len(short_name)
 	n2 = len(long_name)
 	n3 = len(build_id)
@@ -709,12 +734,14 @@ def dso_table(dso_id, machine_id, short_name, long_name, build_id, *x):
 	dso_file.write(value)
 
 def symbol_table(symbol_id, dso_id, sym_start, sym_end, binding, symbol_name, *x):
+	symbol_name = toserverstr(symbol_name)
 	n = len(symbol_name)
 	fmt = "!hiqiqiqiqiii" + str(n) + "s"
 	value = struct.pack(fmt, 6, 8, symbol_id, 8, dso_id, 8, sym_start, 8, sym_end, 4, binding, n, symbol_name)
 	symbol_file.write(value)
 
 def branch_type_table(branch_type, name, *x):
+	name = toserverstr(name)
 	n = len(name)
 	fmt = "!hiii" + str(n) + "s"
 	value = struct.pack(fmt, 2, 4, branch_type, n, name)
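
The pattern repeated in each table writer above (encode first, then take len()) keeps the length field written into the PGCOPY record equal to the number of bytes struct.pack() actually emits; with a non-ASCII name the character count and the byte count would differ. A small check with a made-up symbol name:

    import struct

    def toserverstr(s):
        return bytes(s, "UTF_8")            # Python 3 branch of the helper above

    symbol_name = toserverstr("størrelse")  # 9 characters, 10 bytes in UTF-8
    n = len(symbol_name)                    # 10: the byte count the record needs
    value = struct.pack("!i" + str(n) + "s", n, symbol_name)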
diff --git a/tools/perf/scripts/python/export-to-sqlite.py b/tools/perf/scripts/python/export-to-sqlite.py
index eb63e6c7107f..3b71902a5a21 100644
--- a/tools/perf/scripts/python/export-to-sqlite.py
+++ b/tools/perf/scripts/python/export-to-sqlite.py
@@ -10,6 +10,8 @@
 # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 # more details.
 
+from __future__ import print_function
+
 import os
 import sys
 import struct
@@ -60,11 +62,17 @@ perf_db_export_mode = True
 perf_db_export_calls = False
 perf_db_export_callchains = False
 
+def printerr(*args, **keyword_args):
+	print(*args, file=sys.stderr, **keyword_args)
+
+def printdate(*args, **kw_args):
+	print(datetime.datetime.today(), *args, sep=' ', **kw_args)
+
 def usage():
-	print >> sys.stderr, "Usage is: export-to-sqlite.py <database name> [<columns>] [<calls>] [<callchains>]"
-	print >> sys.stderr, "where:	columns		'all' or 'branches'"
-	print >> sys.stderr, "		calls		'calls' => create calls and call_paths table"
-	print >> sys.stderr, "		callchains	'callchains' => create call_paths table"
+	printerr("Usage is: export-to-sqlite.py <database name> [<columns>] [<calls>] [<callchains>]");
+	printerr("where:	columns		'all' or 'branches'");
+	printerr("		calls		'calls' => create calls and call_paths table");
+	printerr("		callchains	'callchains' => create call_paths table");
 	raise Exception("Too few arguments")
 
 if (len(sys.argv) < 2):
@@ -100,7 +108,7 @@ def do_query_(q):
 		return
 	raise Exception("Query failed: " + q.lastError().text())
 
-print datetime.datetime.today(), "Creating database..."
+printdate("Creating database ...")
 
 db_exists = False
 try:
@@ -378,7 +386,7 @@ if perf_db_export_calls:
 	call_query.prepare("INSERT INTO calls VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")
 
 def trace_begin():
-	print datetime.datetime.today(), "Writing records..."
+	printdate("Writing records...")
 	do_query(query, 'BEGIN TRANSACTION')
 	# id == 0 means unknown. It is easier to create records for them than replace the zeroes with NULLs
 	evsel_table(0, "unknown")
@@ -397,14 +405,14 @@ unhandled_count = 0
 def trace_end():
 	do_query(query, 'END TRANSACTION')
 
-	print datetime.datetime.today(), "Adding indexes"
+	printdate("Adding indexes")
 	if perf_db_export_calls:
 		do_query(query, 'CREATE INDEX pcpid_idx ON calls (parent_call_path_id)')
 		do_query(query, 'CREATE INDEX pid_idx ON calls (parent_id)')
 
 	if (unhandled_count):
-		print datetime.datetime.today(), "Warning: ", unhandled_count, " unhandled events"
-	print datetime.datetime.today(), "Done"
+		printdate("Warning: ", unhandled_count, " unhandled events")
+	printdate("Done")
 
 def trace_unhandled(event_name, context, event_fields_dict):
 	global unhandled_count
diff --git a/tools/perf/scripts/python/exported-sql-viewer.py b/tools/perf/scripts/python/exported-sql-viewer.py
index afec9479ca7f..e38518cdcbc3 100755
--- a/tools/perf/scripts/python/exported-sql-viewer.py
+++ b/tools/perf/scripts/python/exported-sql-viewer.py
@@ -88,11 +88,20 @@
 # 7fab593ea956 48 89 15 3b 13 22 00 movq %rdx, 0x22133b(%rip)
 # 8107675243232 2 ls 22011 22011 hardware interrupt No 7fab593ea956 _dl_start+0x26 (ld-2.19.so) -> ffffffff86a012e0 page_fault ([kernel])
 
+from __future__ import print_function
+
 import sys
 import weakref
 import threading
 import string
-import cPickle
+try:
+	# Python2
+	import cPickle as pickle
+	# size of pickled integer big enough for record size
+	glb_nsz = 8
+except ImportError:
+	import pickle
+	glb_nsz = 16
 import re
 import os
 from PySide.QtCore import *
@@ -102,6 +111,15 @@ from decimal import *
 from ctypes import *
 from multiprocessing import Process, Array, Value, Event
 
+# xrange is range in Python3
+try:
+	xrange
+except NameError:
+	xrange = range
+
+def printerr(*args, **keyword_args):
+	print(*args, file=sys.stderr, **keyword_args)
+
 # Data formatting helpers
 
 def tohex(ip):
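
The NameError probe above leaves Python 2's lazy xrange untouched and simply aliases the name to range on Python 3, where range is already a lazy sequence. A quick sketch of the behaviour on either interpreter:

    try:
        xrange                      # defined on Python 2 only
    except NameError:
        xrange = range              # Python 3: range is already lazy

    total = 0
    for i in xrange(5):             # iterates 0..4 on either interpreter
        total += i
    print(total)                    # 10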
@@ -1004,10 +1022,6 @@ class ChildDataItemFinder():
 
 glb_chunk_sz = 10000
 
-# size of pickled integer big enough for record size
-
-glb_nsz = 8
-
 # Background process for SQL data fetcher
 
 class SQLFetcherProcess():
@@ -1066,7 +1080,7 @@ class SQLFetcherProcess():
 				return True
 			if space >= glb_nsz:
 				# Use 0 (or space < glb_nsz) to mean there is no more at the top of the buffer
-				nd = cPickle.dumps(0, cPickle.HIGHEST_PROTOCOL)
+				nd = pickle.dumps(0, pickle.HIGHEST_PROTOCOL)
 				self.buffer[self.local_head : self.local_head + len(nd)] = nd
 			self.local_head = 0
 		if self.local_tail - self.local_head > sz:
@@ -1084,9 +1098,9 @@ class SQLFetcherProcess():
 			self.wait_event.wait()
 
 	def AddToBuffer(self, obj):
-		d = cPickle.dumps(obj, cPickle.HIGHEST_PROTOCOL)
+		d = pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
 		n = len(d)
-		nd = cPickle.dumps(n, cPickle.HIGHEST_PROTOCOL)
+		nd = pickle.dumps(n, pickle.HIGHEST_PROTOCOL)
 		sz = n + glb_nsz
 		self.WaitForSpace(sz)
 		pos = self.local_head
@@ -1198,12 +1212,12 @@ class SQLFetcher(QObject):
 		pos = self.local_tail
 		if len(self.buffer) - pos < glb_nsz:
 			pos = 0
-		n = cPickle.loads(self.buffer[pos : pos + glb_nsz])
+		n = pickle.loads(self.buffer[pos : pos + glb_nsz])
 		if n == 0:
 			pos = 0
-			n = cPickle.loads(self.buffer[0 : glb_nsz])
+			n = pickle.loads(self.buffer[0 : glb_nsz])
 		pos += glb_nsz
-		obj = cPickle.loads(self.buffer[pos : pos + n])
+		obj = pickle.loads(self.buffer[pos : pos + n])
 		self.local_tail = pos + n
 		return obj
 
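
Swapping cPickle for pickle leaves the ring-buffer record format unchanged: each object is stored as a fixed-size slot holding its pickled length, followed by the pickled payload, so the reader can locate the record boundary. A stripped-down sketch of that framing outside the shared-memory Array (the 16-byte slot mirrors the Python 3 value of glb_nsz above):

    import pickle

    SLOT = 16                                       # slot reserved for the pickled length
    buf = bytearray(4096)

    def put(pos, obj):
        d = pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
        nd = pickle.dumps(len(d), pickle.HIGHEST_PROTOCOL)
        buf[pos : pos + len(nd)] = nd               # length first, in its own slot
        buf[pos + SLOT : pos + SLOT + len(d)] = d   # payload follows the slot
        return pos + SLOT + len(d)

    def get(pos):
        n = pickle.loads(buf[pos : pos + SLOT])     # trailing slot bytes are ignored by loads
        obj = pickle.loads(buf[pos + SLOT : pos + SLOT + n])
        return obj, pos + SLOT + n

    end = put(0, ("sample", 42))
    print(get(0))                                   # (('sample', 42), <position after the record>)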
@@ -2973,7 +2987,7 @@ class DBRef():
 
 def Main():
 	if (len(sys.argv) < 2):
-		print >> sys.stderr, "Usage is: exported-sql-viewer.py {<database name> | --help-only}"
+		printerr("Usage is: exported-sql-viewer.py {<database name> | --help-only}");
 		raise Exception("Too few arguments")
 
 	dbname = sys.argv[1]
@@ -2986,8 +3000,8 @@ def Main():
 
 	is_sqlite3 = False
 	try:
-		f = open(dbname)
-		if f.read(15) == "SQLite format 3":
+		f = open(dbname, "rb")
+		if f.read(15) == b'SQLite format 3':
 			is_sqlite3 = True
 		f.close()
 	except:
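
This last hunk fixes the SQLite detection: a text-mode read on Python 3 returns str, which never compares equal to a bytes literal (and may not even decode cleanly from a binary database file), so the file is opened in binary mode and checked against b'SQLite format 3'. The same check as a stand-alone helper, for illustration:

    def is_sqlite3_file(path):
        # An SQLite database file begins with the 16-byte magic "SQLite format 3\000"
        try:
            with open(path, "rb") as f:
                return f.read(15) == b"SQLite format 3"
        except IOError:
            return False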