Diffstat (limited to 'tools/perf/scripts')
3 files changed, 392 insertions, 0 deletions
diff --git a/tools/perf/scripts/python/bin/export-to-postgresql-record b/tools/perf/scripts/python/bin/export-to-postgresql-record
new file mode 100644
index 000000000000..221d66e05713
--- /dev/null
+++ b/tools/perf/scripts/python/bin/export-to-postgresql-record
@@ -0,0 +1,8 @@
#!/bin/bash

#
# export perf data to a postgresql database. Can cover
# perf ip samples (excluding the tracepoints). No special
# record requirements, just record what you want to export.
#
perf record "$@"
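The record step has no special requirements: whatever is recorded is what gets exported. As a minimal sketch, an equivalent recording driven from Python, where the cycles:u event and the profiled command ("ls") are only illustrative choices:

# Minimal sketch of the recording step the wrapper above performs; the
# event selection and the profiled command ("ls") are illustrative only.
import subprocess

subprocess.check_call(["perf", "record", "-e", "cycles:u", "--", "ls"])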
diff --git a/tools/perf/scripts/python/bin/export-to-postgresql-report b/tools/perf/scripts/python/bin/export-to-postgresql-report
new file mode 100644
index 000000000000..a8fdd15f85bf
--- /dev/null
+++ b/tools/perf/scripts/python/bin/export-to-postgresql-report
@@ -0,0 +1,24 @@
#!/bin/bash
# description: export perf data to a postgresql database
# args: [database name] [columns]
n_args=0
for i in "$@"
do
    if expr match "$i" "-" > /dev/null ; then
        break
    fi
    n_args=$(( $n_args + 1 ))
done
if [ "$n_args" -gt 2 ] ; then
    echo "usage: export-to-postgresql-report [database name] [columns]"
    exit
fi
if [ "$n_args" -gt 1 ] ; then
    dbname=$1
    columns=$2
    shift 2
elif [ "$n_args" -gt 0 ] ; then
    dbname=$1
    shift
fi
perf script "$@" -s "$PERF_EXEC_PATH"/scripts/python/export-to-postgresql.py $dbname $columns
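The report wrapper treats up to two leading non-option arguments as the database name and the column set, then hands everything else to perf script along with the exporter. A rough equivalent of that export step, assuming PERF_EXEC_PATH points at the perf installation, perf.data is in the current directory, and "mydb" is a database name of your choosing:

# Rough equivalent of the export step run by the wrapper above; "mydb" is an
# assumed database name and "branches" selects the smaller samples table.
import os
import subprocess

exporter = os.path.join(os.environ["PERF_EXEC_PATH"],
                        "scripts/python/export-to-postgresql.py")
subprocess.check_call(["perf", "script", "-i", "perf.data",
                       "-s", exporter, "mydb", "branches"])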
diff --git a/tools/perf/scripts/python/export-to-postgresql.py b/tools/perf/scripts/python/export-to-postgresql.py
new file mode 100644
index 000000000000..d8f6df0093d6
--- /dev/null
+++ b/tools/perf/scripts/python/export-to-postgresql.py
@@ -0,0 +1,360 @@
# export-to-postgresql.py: export perf data to a postgresql database
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.

import os
import sys
import struct
import datetime

from PySide.QtSql import *

# Need to access PostgreSQL C library directly to use COPY FROM STDIN
from ctypes import *
libpq = CDLL("libpq.so.5")
PQconnectdb = libpq.PQconnectdb
PQconnectdb.restype = c_void_p
PQfinish = libpq.PQfinish
PQstatus = libpq.PQstatus
PQexec = libpq.PQexec
PQexec.restype = c_void_p
PQresultStatus = libpq.PQresultStatus
PQputCopyData = libpq.PQputCopyData
PQputCopyData.argtypes = [ c_void_p, c_void_p, c_int ]
PQputCopyEnd = libpq.PQputCopyEnd
PQputCopyEnd.argtypes = [ c_void_p, c_void_p ]

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

# These perf imports are not used at present
#from perf_trace_context import *
#from Core import *

perf_db_export_mode = True

def usage():
    print >> sys.stderr, "Usage is: export-to-postgresql.py <database name> [<columns>]"
    print >> sys.stderr, "where: columns is 'all' or 'branches'"
    raise Exception("Too few arguments")

if (len(sys.argv) < 2):
    usage()

dbname = sys.argv[1]

if (len(sys.argv) >= 3):
    columns = sys.argv[2]
else:
    columns = "all"

if columns not in ("all", "branches"):
    usage()

branches = (columns == "branches")

output_dir_name = os.getcwd() + "/" + dbname + "-perf-data"
os.mkdir(output_dir_name)

def do_query(q, s):
    if (q.exec_(s)):
        return
    raise Exception("Query failed: " + q.lastError().text())

print datetime.datetime.today(), "Creating database..."

db = QSqlDatabase.addDatabase('QPSQL')
query = QSqlQuery(db)
db.setDatabaseName('postgres')
db.open()
try:
    do_query(query, 'CREATE DATABASE ' + dbname)
except:
    os.rmdir(output_dir_name)
    raise
query.finish()
query.clear()
db.close()

db.setDatabaseName(dbname)
db.open()

query = QSqlQuery(db)
do_query(query, 'SET client_min_messages TO WARNING')

do_query(query, 'CREATE TABLE selected_events ('
        'id bigint NOT NULL,'
        'name varchar(80))')
do_query(query, 'CREATE TABLE machines ('
        'id bigint NOT NULL,'
        'pid integer,'
        'root_dir varchar(4096))')
do_query(query, 'CREATE TABLE threads ('
        'id bigint NOT NULL,'
        'machine_id bigint,'
        'process_id bigint,'
        'pid integer,'
        'tid integer)')
do_query(query, 'CREATE TABLE comms ('
        'id bigint NOT NULL,'
        'comm varchar(16))')
do_query(query, 'CREATE TABLE comm_threads ('
        'id bigint NOT NULL,'
        'comm_id bigint,'
        'thread_id bigint)')
do_query(query, 'CREATE TABLE dsos ('
        'id bigint NOT NULL,'
        'machine_id bigint,'
        'short_name varchar(256),'
        'long_name varchar(4096),'
        'build_id varchar(64))')
do_query(query, 'CREATE TABLE symbols ('
        'id bigint NOT NULL,'
        'dso_id bigint,'
        'sym_start bigint,'
        'sym_end bigint,'
        'binding integer,'
        'name varchar(2048))')
if branches:
    do_query(query, 'CREATE TABLE samples ('
            'id bigint NOT NULL,'
            'evsel_id bigint,'
            'machine_id bigint,'
            'thread_id bigint,'
            'comm_id bigint,'
            'dso_id bigint,'
            'symbol_id bigint,'
            'sym_offset bigint,'
            'ip bigint,'
            'time bigint,'
            'cpu integer,'
            'to_dso_id bigint,'
            'to_symbol_id bigint,'
            'to_sym_offset bigint,'
            'to_ip bigint)')
else:
    do_query(query, 'CREATE TABLE samples ('
            'id bigint NOT NULL,'
            'evsel_id bigint,'
            'machine_id bigint,'
            'thread_id bigint,'
            'comm_id bigint,'
            'dso_id bigint,'
            'symbol_id bigint,'
            'sym_offset bigint,'
            'ip bigint,'
            'time bigint,'
            'cpu integer,'
            'to_dso_id bigint,'
            'to_symbol_id bigint,'
            'to_sym_offset bigint,'
            'to_ip bigint,'
            'period bigint,'
            'weight bigint,'
            'transaction bigint,'
            'data_src bigint)')

do_query(query, 'CREATE VIEW samples_view AS '
        'SELECT '
        'id,'
        'time,'
        'cpu,'
        '(SELECT pid FROM threads WHERE id = thread_id) AS pid,'
        '(SELECT tid FROM threads WHERE id = thread_id) AS tid,'
        '(SELECT comm FROM comms WHERE id = comm_id) AS command,'
        '(SELECT name FROM selected_events WHERE id = evsel_id) AS event,'
        'to_hex(ip) AS ip_hex,'
        '(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,'
        'sym_offset,'
        '(SELECT short_name FROM dsos WHERE id = dso_id) AS dso_short_name,'
        'to_hex(to_ip) AS to_ip_hex,'
        '(SELECT name FROM symbols WHERE id = to_symbol_id) AS to_symbol,'
        'to_sym_offset,'
        '(SELECT short_name FROM dsos WHERE id = to_dso_id) AS to_dso_short_name'
        ' FROM samples')


file_header = struct.pack("!11sii", "PGCOPY\n\377\r\n\0", 0, 0)
file_trailer = "\377\377"

def open_output_file(file_name):
    path_name = output_dir_name + "/" + file_name
    file = open(path_name, "w+")
    file.write(file_header)
    return file

def close_output_file(file):
    file.write(file_trailer)
    file.close()

def copy_output_file_direct(file, table_name):
    close_output_file(file)
    sql = "COPY " + table_name + " FROM '" + file.name + "' (FORMAT 'binary')"
    do_query(query, sql)

# Use COPY FROM STDIN because security may prevent postgres from accessing the files directly
def copy_output_file(file, table_name):
    conn = PQconnectdb("dbname = " + dbname)
    if (PQstatus(conn)):
        raise Exception("COPY FROM STDIN PQconnectdb failed")
    file.write(file_trailer)
    file.seek(0)
    sql = "COPY " + table_name + " FROM STDIN (FORMAT 'binary')"
    res = PQexec(conn, sql)
    if (PQresultStatus(res) != 4):
        raise Exception("COPY FROM STDIN PQexec failed")
    data = file.read(65536)
    while (len(data)):
        ret = PQputCopyData(conn, data, len(data))
        if (ret != 1):
            raise Exception("COPY FROM STDIN PQputCopyData failed, error " + str(ret))
        data = file.read(65536)
    ret = PQputCopyEnd(conn, None)
    if (ret != 1):
        raise Exception("COPY FROM STDIN PQputCopyEnd failed, error " + str(ret))
    PQfinish(conn)

def remove_output_file(file):
    name = file.name
    file.close()
    os.unlink(name)

evsel_file = open_output_file("evsel_table.bin")
machine_file = open_output_file("machine_table.bin")
thread_file = open_output_file("thread_table.bin")
comm_file = open_output_file("comm_table.bin")
comm_thread_file = open_output_file("comm_thread_table.bin")
dso_file = open_output_file("dso_table.bin")
symbol_file = open_output_file("symbol_table.bin")
sample_file = open_output_file("sample_table.bin")

def trace_begin():
    print datetime.datetime.today(), "Writing to intermediate files..."
    # id == 0 means unknown. It is easier to create records for them than replace the zeroes with NULLs
    evsel_table(0, "unknown")
    machine_table(0, 0, "unknown")
    thread_table(0, 0, 0, -1, -1)
    comm_table(0, "unknown")
    dso_table(0, 0, "unknown", "unknown", "")
    symbol_table(0, 0, 0, 0, 0, "unknown")

unhandled_count = 0

def trace_end():
    print datetime.datetime.today(), "Copying to database..."
    copy_output_file(evsel_file, "selected_events")
    copy_output_file(machine_file, "machines")
    copy_output_file(thread_file, "threads")
    copy_output_file(comm_file, "comms")
    copy_output_file(comm_thread_file, "comm_threads")
    copy_output_file(dso_file, "dsos")
    copy_output_file(symbol_file, "symbols")
    copy_output_file(sample_file, "samples")

    print datetime.datetime.today(), "Removing intermediate files..."
    remove_output_file(evsel_file)
    remove_output_file(machine_file)
    remove_output_file(thread_file)
    remove_output_file(comm_file)
    remove_output_file(comm_thread_file)
    remove_output_file(dso_file)
    remove_output_file(symbol_file)
    remove_output_file(sample_file)
    os.rmdir(output_dir_name)
    print datetime.datetime.today(), "Adding primary keys"
    do_query(query, 'ALTER TABLE selected_events ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE machines ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE threads ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE comms ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE comm_threads ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE dsos ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE symbols ADD PRIMARY KEY (id)')
    do_query(query, 'ALTER TABLE samples ADD PRIMARY KEY (id)')

    print datetime.datetime.today(), "Adding foreign keys"
    do_query(query, 'ALTER TABLE threads '
            'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
            'ADD CONSTRAINT processfk FOREIGN KEY (process_id) REFERENCES threads (id)')
    do_query(query, 'ALTER TABLE comm_threads '
            'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
            'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id)')
    do_query(query, 'ALTER TABLE dsos '
            'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id)')
    do_query(query, 'ALTER TABLE symbols '
            'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id)')
    do_query(query, 'ALTER TABLE samples '
            'ADD CONSTRAINT evselfk FOREIGN KEY (evsel_id) REFERENCES selected_events (id),'
            'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
            'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),'
            'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
            'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id),'
            'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id),'
            'ADD CONSTRAINT todsofk FOREIGN KEY (to_dso_id) REFERENCES dsos (id),'
            'ADD CONSTRAINT tosymbolfk FOREIGN KEY (to_symbol_id) REFERENCES symbols (id)')

    if (unhandled_count):
        print datetime.datetime.today(), "Warning: ", unhandled_count, " unhandled events"
    print datetime.datetime.today(), "Done"

def trace_unhandled(event_name, context, event_fields_dict):
    global unhandled_count
    unhandled_count += 1

def sched__sched_switch(*x):
    pass

def evsel_table(evsel_id, evsel_name, *x):
    n = len(evsel_name)
    fmt = "!hiqi" + str(n) + "s"
    value = struct.pack(fmt, 2, 8, evsel_id, n, evsel_name)
    evsel_file.write(value)

def machine_table(machine_id, pid, root_dir, *x):
    n = len(root_dir)
    fmt = "!hiqiii" + str(n) + "s"
    value = struct.pack(fmt, 3, 8, machine_id, 4, pid, n, root_dir)
    machine_file.write(value)

def thread_table(thread_id, machine_id, process_id, pid, tid, *x):
    value = struct.pack("!hiqiqiqiiii", 5, 8, thread_id, 8, machine_id, 8, process_id, 4, pid, 4, tid)
    thread_file.write(value)

def comm_table(comm_id, comm_str, *x):
    n = len(comm_str)
    fmt = "!hiqi" + str(n) + "s"
    value = struct.pack(fmt, 2, 8, comm_id, n, comm_str)
    comm_file.write(value)

def comm_thread_table(comm_thread_id, comm_id, thread_id, *x):
    fmt = "!hiqiqiq"
    value = struct.pack(fmt, 3, 8, comm_thread_id, 8, comm_id, 8, thread_id)
    comm_thread_file.write(value)

def dso_table(dso_id, machine_id, short_name, long_name, build_id, *x):
    n1 = len(short_name)
    n2 = len(long_name)
    n3 = len(build_id)
    fmt = "!hiqiqi" + str(n1) + "si" + str(n2) + "si" + str(n3) + "s"
    value = struct.pack(fmt, 5, 8, dso_id, 8, machine_id, n1, short_name, n2, long_name, n3, build_id)
    dso_file.write(value)

def symbol_table(symbol_id, dso_id, sym_start, sym_end, binding, symbol_name, *x):
    n = len(symbol_name)
    fmt = "!hiqiqiqiqiii" + str(n) + "s"
    value = struct.pack(fmt, 6, 8, symbol_id, 8, dso_id, 8, sym_start, 8, sym_end, 4, binding, n, symbol_name)
    symbol_file.write(value)

def sample_table(sample_id, evsel_id, machine_id, thread_id, comm_id, dso_id, symbol_id, sym_offset, ip, time, cpu, to_dso_id, to_symbol_id, to_sym_offset, to_ip, period, weight, transaction, data_src, *x):
    if branches:
        value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiq", 15, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip)
    else:
        value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiqiqiqiq", 19, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip, 8, period, 8, weight, 8, transaction, 8, data_src)
    sample_file.write(value)
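Once the export completes, the data can be read back through the samples_view created above. A small sketch using the same PySide QtSql binding the exporter relies on, assuming the database was created as "mydb" by a previous run:

# Minimal sketch of querying the exported data via samples_view; "mydb" is
# an assumed database name created by an earlier export run.
from PySide.QtSql import QSqlDatabase, QSqlQuery

db = QSqlDatabase.addDatabase('QPSQL')
db.setDatabaseName('mydb')
db.open()
query = QSqlQuery(db)
query.exec_('SELECT command, symbol, count(*) AS hits '
            'FROM samples_view GROUP BY command, symbol '
            'ORDER BY hits DESC LIMIT 10')
while query.next():
    print query.value(0), query.value(1), query.value(2)
db.close()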
