Diffstat (limited to 'parse_exps.py')
-rwxr-xr-x  parse_exps.py  122
1 file changed, 34 insertions, 88 deletions
diff --git a/parse_exps.py b/parse_exps.py
index d932b0d..c8cd8b1 100755
--- a/parse_exps.py
+++ b/parse_exps.py
@@ -2,11 +2,9 @@
 from __future__ import print_function
 
 import config.config as conf
-import copy
 import os
 import parse.ft as ft
 import parse.sched as st
-import re
 import shutil as sh
 import sys
 
@@ -22,13 +20,8 @@ def parse_args():
 
     parser.add_option('-o', '--out', dest='out',
                       help='file or directory for data output', default='parse-data')
-
-    # TODO: this means nothing, also remove dests
     parser.add_option('-c', '--clean', action='store_true', default=False,
                       dest='clean', help='do not output single-point csvs')
-    parser.add_option('-s', '--scale-against', dest='scale_against',
-                      metavar='PARAM=VALUE', default="",
-                      help='calculate task scaling factors against these configs')
     parser.add_option('-i', '--ignore', metavar='[PARAM...]', default="",
                       help='ignore changing parameter values')
     parser.add_option('-f', '--force', action='store_true', default=False,
@@ -41,136 +34,89 @@ def parse_args():
 
     return parser.parse_args()
 
-ExpData = namedtuple('ExpData', ['name', 'params', 'data_files', 'is_base'])
-DataFiles = namedtuple('DataFiles', ['st'])
+ExpData = namedtuple('ExpData', ['path', 'params', 'work_dir'])
 
 def get_exp_params(data_dir, col_map):
     param_file = "%s/%s" % (data_dir, conf.DEFAULTS['params_file'])
     if not os.path.isfile:
         raise Exception("No param file '%s' exists!" % param_file)
 
-    # Keep only params that uniquely identify the experiment
+    # Ignore 'magic' parameters used by these scripts
     params = load_params(param_file)
     for ignored in conf.PARAMS.itervalues():
-        # Always include cycles or overhead parsing fails
+        # With the exception of cycles which is used by overhead parsing
        if ignored in params and ignored != conf.PARAMS['cycles']:
             params.pop(ignored)
 
-    # Track all changed params
+    # Store parameters in col_map, which will track which parameters change
+    # across experiments
     for key, value in params.iteritems():
         col_map.try_add(key, value)
 
+    # Cycles must be present
     if conf.PARAMS['cycles'] not in params:
         params[conf.PARAMS['cycles']] = conf.DEFAULTS['cycles']
 
     return params
 
 
-def gen_exp_data(exp_dirs, base_conf, col_map, force):
-    plain_exps = []
-    scaling_bases = []
+def load_exps(exp_dirs, col_map, clean):
+    exps = []
 
-    sys.stderr.write("Generating data...\n")
+    sys.stderr.write("Loading experiments...\n")
 
-    for i, data_dir in enumerate(exp_dirs):
+    for data_dir in exp_dirs:
         if not os.path.isdir(data_dir):
             raise IOError("Invalid experiment '%s'" % os.path.abspath(data_dir))
 
-        tmp_dir = data_dir + "/tmp"
-        if not os.path.exists(tmp_dir):
-            os.mkdir(tmp_dir)
-
-        # Read and translate exp output files
-        params = get_exp_params(data_dir, col_map)
-        st_output = st.get_st_output(data_dir, tmp_dir, force)
-
-        if base_conf and base_conf.viewitems() & params.viewitems():
-            if not st_output:
-                raise Exception("Scaling base '%s' useless without sched data!"
-                                % data_dir)
-            is_base = True
-
-            base_params = copy.deepcopy(params)
-            base_params.pop(base_conf.keys()[0])
+        # Used to store error output and debugging info
+        work_dir = data_dir + "/tmp"
 
-            base_exp = ExpData(data_dir, base_params,
-                               DataFiles(st_output), True)
-            scaling_bases += [base_exp]
-        else:
-            is_base = False
+        if not os.path.exists(work_dir):
+            os.mkdir(work_dir)
+        elif clean:
+            sh.rmtree(work_dir)
 
-        # Create experiment named after the data dir
-        exp_data = ExpData(data_dir, params,
-                           DataFiles(st_output), is_base)
+        params = get_exp_params(data_dir, col_map)
 
-        plain_exps += [exp_data]
+        exps += [ ExpData(data_dir, params, work_dir) ]
 
-        sys.stderr.write('\r {0:.2%}'.format(float(i)/len(exp_dirs)))
-    sys.stderr.write('\n')
-    return (plain_exps, scaling_bases)
+    return exps
 
 def main():
     opts, args = parse_args()
 
     args = args or [os.getcwd()]
 
-    # Configuration key for task systems used to calculate task
-    # execution scaling factors
-    base_conf = dict(re.findall("(.*)=(.*)", opts.scale_against))
-
+    # Load exp parameters into col_map
     col_map = ColMap()
+    exps = load_exps(args, col_map, opts.force)
 
-    (plain_exps, scaling_bases) = gen_exp_data(args, base_conf, col_map, opts.force)
-
-    if base_conf and base_conf.keys()[0] not in col_map:
-        raise IOError("Base column '%s' not present in any parameters!" %
-                      base_conf.keys()[0])
-
-    base_map = copy.deepcopy(col_map)
+    # Don't track changes in ignored parameters
     if opts.ignore:
         for param in opts.ignore.split(","):
             col_map.try_remove(param)
 
-    base_table = TupleTable(base_map) # For tracking 'base' experiments
-    result_table = TupleTable(col_map) # For generating output
-
-    # Used to find matching scaling_base for each experiment
-    for base in scaling_bases:
-        base_table.add_exp(base.params, base)
+    result_table = TupleTable(col_map)
 
     sys.stderr.write("Parsing data...\n")
-    for exp in args:
-        result = ExpPoint(exp)
-        params = get_exp_params(exp, col_map)
-        # Write overheads into result
-        ft.extract_ft_data(result, exp,
-                           params[conf.PARAMS['cycles']],
-                           exp + "/tmp")
-
-        if opts.verbose:
-            print(result)
-
-    for i,exp in enumerate(plain_exps):
-        result = ExpPoint(exp.name)
-
-        if exp.data_files.st:
-            base = None
-            if base_conf and not exp.is_base:
-                # Try to find a scaling base
-                base_params = copy.deepcopy(exp.params)
-                base_params.pop(base_conf.keys()[0])
-                base = base_table.get_exps(base_params)[0]
+    for i,exp in enumerate(exps):
+        result = ExpPoint(exp.path)
+        cycles = exp.params[conf.PARAMS['cycles']]
 
-            # Write deadline misses / tardiness into result
-            st.extract_sched_data(exp.data_files.st, result,
-                                  base.data_files.st if base else None)
+        # Write overheads into result
+        ft.extract_ft_data(result, exp.path, exp.work_dir, cycles)
 
-            result_table.add_exp(exp.params, result)
+        # Write scheduling statistics into result
+        st.extract_sched_data(result, exp.path, exp.work_dir)
 
         if opts.verbose:
             print(result)
         else:
-            sys.stderr.write('\r {0:.2%}'.format(float(i)/len(plain_exps)))
+            sys.stderr.write('\r {0:.2%}'.format(float(i)/len(exps)))
+
+        result_table.add_exp(exp.params, result)
+
     sys.stderr.write('\n')
 
     if opts.force and os.path.exists(opts.out):