Diffstat (lines changed per file):
 common.py            |  26
 config/config.py     |  16
 gen/__init__.py      |   0
 gen/dp.py            |  33
 gen/generators.py    | 257
 gen/rv.py            |  86
 gen_exps.py          |  98   (mode -rw-r--r-- -> -rwxr-xr-x)
 parse/ft.py          |   4
 parse/sched.py       |   2
 parse/tuple_table.py |   9
 parse_exps.py        |  13
 run_exps.py          |   1

 12 files changed, 525 insertions(+), 20 deletions(-)
diff --git a/common.py b/common.py
index 0990cfe..ad3c418 100644
--- a/common.py
+++ b/common.py
@@ -1,9 +1,14 @@
+import os
+import re
+import subprocess
 import sys
+
 from collections import defaultdict
 from textwrap import dedent
 
 def get_executable(prog, hint, optional=False):
-    import os
+    '''Search for @prog in system PATH. Print @hint if no binary is found.'''
+
     def is_exe(fpath):
         return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
 
@@ -19,12 +24,29 @@ def get_executable(prog, hint, optional=False):
 
     if not optional:
         sys.stderr.write("Cannot find executable '%s' in PATH. This is a part "
-                         "of '%s' which should be added to PATH to run." %
+                         "of '%s' which should be added to PATH to run.\n" %
                          (prog, hint))
         sys.exit(1)
     else:
         return None
 
+def get_config_option(option):
+    '''Search for @option in installed kernel config (if present).
+    Raise an IOError if the kernel config isn't found in /boot/.'''
+    uname = subprocess.check_output(["uname", "-r"])[:-1]
+    fname = "/boot/config-%s" % uname
+
+    if os.path.exists(fname):
+        config_regex = "^CONFIG_{}=(?P<val>.*)$".format(option)
+        match = re.search(config_regex, open(fname, 'r').read(), re.M)
+        if not match:
+            return None
+        else:
+            return match.group("val")
+
+    else:
+        raise IOError("No config file exists!")
+
 def recordtype(typename, field_names, default=0):
     ''' Mutable namedtuple. Recipe from George Sakkis of MIT.'''
     field_names = tuple(map(str, field_names))
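
The generators added below use this helper to probe the installed kernel configuration. A minimal usage sketch (illustrative only; it assumes a /boot/config-`uname -r` file is present and mirrors the call made in gen/generators.py):

    from common import get_config_option

    try:
        # True when the installed kernel was built with CONFIG_RELEASE_MASTER
        has_release_master = get_config_option("RELEASE_MASTER") is not None
    except IOError:
        # No /boot/config-* file to inspect
        has_release_master = False
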
diff --git a/config/config.py b/config/config.py
index 3282705..d463999 100644
--- a/config/config.py
+++ b/config/config.py
@@ -17,18 +17,20 @@ BINS = {'rtspin' : get_executable('rtspin', 'liblitmus'),
 FILES = {'ft_data'    : 'ft.bin',
          'linux_data' : 'trace.dat',
          'sched_data' : 'st-{}.bin',
-         'log_data'   : 'trace.slog',}
+         'log_data'   : 'trace.slog'}
 
 '''Default parameter names in params.py.'''
-PARAMS = {'sched'  : 'scheduler',
-          'dur'    : 'duration',
-          'kernel' : 'uname',
-          'cycles' : 'cpu-frequency'}
+# TODO: add check for config options
+PARAMS = {'sched'  : 'scheduler',      # Scheduler used by run_exps
+          'dur'    : 'duration',       # Duration of tests in run_exps
+          'kernel' : 'uname',          # Regex of required OS name in run_exps
+          'cycles' : 'cpu-frequency',  # Frequency run_exps was run with
+          'tasks'  : 'tasks'           # Number of tasks
+          }
 
-'''Default values for program parameters.'''
+'''Default values for program options.'''
 DEFAULTS = {'params_file' : 'params.py',
             'sched_file'  : 'sched.py',
-            'exps_file'   : 'exps.py',
             'duration'    : 10,
             'spin'        : 'rtspin',
             'cycles'      : 2000}
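
For orientation, a params.py file as these scripts read and write it is a single Python dictionary literal keyed by the names in PARAMS. A hypothetical example (all values below are made up):

    {'scheduler'     : 'GSN-EDF',
     'duration'      : 30,
     'cpu-frequency' : 2000.0,
     'tasks'         : 8}
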
diff --git a/gen/__init__.py b/gen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gen/__init__.py
diff --git a/gen/dp.py b/gen/dp.py
new file mode 100644
index 0000000..0ac8cce
--- /dev/null
+++ b/gen/dp.py
@@ -0,0 +1,33 @@
+from __future__ import division
+
+class DesignPointGenerator(object):
+    '''Iterates over all combinations of values specified in options.
+    Shamelessly stolen (and simplified) from bcw.'''
+    def __init__(self, options):
+        self.point_idx = 0 # Current point
+        self.options = options
+        self.total = 1
+        for x in options.itervalues():
+            self.total *= len(x)
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        while True:
+            if self.point_idx == self.total:
+                raise StopIteration
+            else:
+                point = {}
+
+                divisor = 1
+                for key in sorted(self.options.keys()):
+                    size = len(self.options[key])
+
+                    option_idx = int(self.point_idx / divisor) % size
+                    point[key] = self.options[key][option_idx]
+
+                    divisor *= size
+                self.point_idx += 1
+
+                return point
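
A short sketch of how this iterator behaves (illustrative; the option names and values are made up). Each yielded point maps every key to one of its candidate values, so the example below prints 2 x 2 = 4 dictionaries:

    from gen.dp import DesignPointGenerator

    options = {'cpus'  : [4, 8],
               'utils' : ['uni-light', 'uni-medium']}

    for point in DesignPointGenerator(options):
        print(point)   # e.g. {'cpus': 4, 'utils': 'uni-light'}, then {'cpus': 8, ...}
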
diff --git a/gen/generators.py b/gen/generators.py
new file mode 100644
index 0000000..2fc77a7
--- /dev/null
+++ b/gen/generators.py
@@ -0,0 +1,257 @@
+from Cheetah.Template import Template
+from collections import namedtuple
+from common import get_config_option
+from config.config import DEFAULTS
+from gen.dp import DesignPointGenerator
+from parse.tuple_table import ColMap
+
+import gen.rv as rv
+import os
+import random
+import run.litmus_util as lu
+import schedcat.generator.tasks as tasks
+import shutil as sh
+
+NAMED_PERIODS = {
+    'harmonic'      : rv.uniform_choice([25, 50, 100, 200]),
+    'uni-short'     : rv.uniform_int( 3, 33),
+    'uni-moderate'  : rv.uniform_int(10, 100),
+    'uni-long'      : rv.uniform_int(50, 250),
+}
+
+NAMED_UTILIZATIONS = {
+    'uni-very-light': rv.uniform(0.0001, 0.001),
+    'uni-light'     : rv.uniform(0.001, 0.1),
+    'uni-medium'    : rv.uniform(  0.1, 0.4),
+    'uni-heavy'     : rv.uniform(  0.5, 0.9),
+
+    'exp-light'     : rv.exponential(0, 1, 0.10),
+    'exp-medium'    : rv.exponential(0, 1, 0.25),
+    'exp-heavy'     : rv.exponential(0, 1, 0.50),
+
+    'bimo-light'    : rv.multimodal([(rv.uniform(0.001, 0.5), 8),
+                                     (rv.uniform(  0.5, 0.9), 1)]),
+    'bimo-medium'   : rv.multimodal([(rv.uniform(0.001, 0.5), 6),
+                                     (rv.uniform(  0.5, 0.9), 3)]),
+    'bimo-heavy'    : rv.multimodal([(rv.uniform(0.001, 0.5), 4),
+                                     (rv.uniform(  0.5, 0.9), 5)]),
+}
+
+# Cheetah templates for schedule files
+TP_CLUSTER = "plugins/C-EDF/cluster{$level}"
+TP_RM = """#if $release_master
+release_master{1}
+#end if"""
+TP_TBASE = """#for $t in $task_set
+{}$t.cost $t.period
+#end for"""
+TP_PART_TASK = TP_TBASE.format("-p $t.cpu ")
+TP_GLOB_TASK = TP_TBASE.format("")
+
+GenOption = namedtuple('GenOption', ['name', 'types', 'default', 'help'])
+
+class BaseGenerator(object):
+    '''Creates sporadic task sets with the most common Litmus options.'''
+    def __init__(self, name, templates, options, params):
+        self.options = self.__make_options() + options
+
+        self.__setup_params(params)
+
+        self.params = params
+        self.template = "\n".join([TP_RM] + templates)
+        self.name = name
+
+    def __make_options(self):
+        '''Return generic Litmus options.'''
+
+        # Guess defaults using the properties of this computer
+        cpus = lu.num_cpus()
+        try:
+            config = get_config_option("RELEASE_MASTER") and True
+        except:
+            config = False
+        release_master = list(set([False, config]))
+
+        list_types = [str, float, type([])]
+
+        return [GenOption('cpus', int, [cpus],
+                          'Number of processors on target system.'),
+                GenOption('num_tasks', int, range(cpus, 5*cpus, cpus),
+                          'Number of tasks per experiment.'),
+                GenOption('utils', list_types + NAMED_UTILIZATIONS.keys(),
+                          ['uni-medium'], 'Task utilization distributions.'),
+                GenOption('periods', list_types + NAMED_PERIODS.keys(),
+                          ['harmonic'], 'Task period distributions.'),
+                GenOption('release_master', [True,False], release_master,
+                          'Redirect release interrupts to a single CPU.'),
+                GenOption('duration', float, [30], 'Experiment duration.')]
+
+    def __create_dist(self, name, value, named_dists):
+        '''Attempt to create a distribution representing the data in @value.
+        If @value is a string, use it as a key for @named_dists.'''
+        name = "%s distribution" % name
+        # A list of values
+        if type(value) == type([]):
+            map(lambda x : self._check_value(name, [float, int], x), value)
+            return rv.uniform_choice(value)
+        elif type(value) in [float, int]:
+            return lambda : value
+        elif value in named_dists:
+            return named_dists[value]
+        else:
+            raise ValueError("Invalid %s value: %s" % (name, value))
+
+    def __create_exp(self, exp_params, out_dir):
+        '''Create a single experiment with @exp_params in @out_dir.'''
+        pdist = self.__create_dist('period',
+                                   exp_params['periods'],
+                                   NAMED_PERIODS)
+        udist = self.__create_dist('utilization',
+                                   exp_params['utils'],
+                                   NAMED_UTILIZATIONS)
+        tg = tasks.TaskGenerator(period=pdist, util=udist)
+
+        ts = []
+        tries = 0
+        while len(ts) != exp_params['num_tasks'] and tries < 5:
+            ts = tg.make_task_set(max_tasks = exp_params['num_tasks'])
+            tries += 1
+        if len(ts) != exp_params['num_tasks']:
+            print("Failed to create task set with parameters: %s" % exp_params)
+
+        self._customize(ts, exp_params)
+
+        sched_file = out_dir + "/" + DEFAULTS['sched_file']
+        with open(sched_file, 'wa') as f:
+            exp_params['task_set'] = ts
+            f.write(str(Template(self.template, searchList=[exp_params])))
+
+        del exp_params['task_set']
+        exp_params_file = out_dir + "/" + DEFAULTS['params_file']
+        with open(exp_params_file, 'wa') as f:
+            exp_params['scheduler'] = 'CEDF'
+            f.write(str(exp_params))
+
+    def __setup_params(self, params):
+        '''Set default parameter values and check that values are valid.'''
+        for option in self.options:
+            if option.name not in params:
+                params[option.name] = option.default
+            params[option.name] = self._check_value(option.name,
+                                                    option.types,
+                                                    params[option.name])
+        return params
+
+
+    def _check_value(self, name, types, val):
+        '''Raise an exception if the value of type of @val is not specified
+        in @types. Returns a copy of @val with strings converted to raw
+        Python types, if possible.'''
+        if types == float:
+            types = [float, int]
+        if type(types) != type([]):
+            types = [types]
+        if type(val) != type([]):
+            val = [val]
+
+        retval = []
+        for v in val:
+            # Has to be a better way to find this
+            v = False if v in ['f', 'False', 'false', 'n', 'no'] else v
+            v = True if v in ['t', 'True', 'true', 'y', 'yes'] else v
+
+            if type(v) not in types and v not in types:
+                # Try and convert v to one of the specified types
+                parsed = None
+                for t in types:
+                    try:
+                        parsed = t(v)
+                        break
+                    except:
+                        pass
+
+                if parsed:
+                    retval += [parsed]
+                else:
+                    raise TypeError("Invalid %s value: '%s'" % (name, v))
+            else:
+                retval += [v]
+        return retval
+
+    def _customize(self, taskset, exp_params):
+        '''Configure a generated taskset with extra parameters.'''
+        pass
+
+    def create_exps(self, out_dir, force):
+        '''Create experiments for all possible combinations of params in
+        @out_dir. Overwrite existing files if @force is True.'''
+        col_map = ColMap()
+
+        # Track changing values so only relevant parameters are included
+        # in directory names
+        for dp in DesignPointGenerator(self.params):
+            for k, v in dp.iteritems():
+                col_map.try_add(k, v)
+
+        for dp in DesignPointGenerator(self.params):
+            dir_leaf = "sched=%s_%s" % (self.name, col_map.get_encoding(dp))
+            dir_path = "%s/%s" % (out_dir, dir_leaf.strip('_'))
+
+            if os.path.exists(dir_path):
+                if force:
+                    sh.rmtree(dir_path)
+                else:
+                    print("Skipping existing experiment: '%s'" % dir_path)
+                    continue
+
+            os.mkdir(dir_path)
+
+            self.__create_exp(dp, dir_path)
+
+    def print_help(self):
+        s = str(Template("""Generator $name:
+        #for $o in $options
+        $o.name -- $o.help
+        \tDefault: $o.default
+        \tAllowed: $o.types
+        #end for""", searchList=vars(self)))
+
+        # Has to be an easier way to print this out...
+        for line in s.split("\n"):
+            res = []
+            i = 0
+            for word in line.split(", "):
+                i += len(word)
+                res += [word]
+                if i > 80:
+                    print ", ".join(res[:-1])
+                    res = ["\t\t "+res[-1]]
+                    i = line.index("'")
+            print ", ".join(res)
+
+class PartitionedGenerator(BaseGenerator):
+    def __init__(self, name, templates, options, params):
+        super(PartitionedGenerator, self).__init__(name,
+            templates + [TP_PART_TASK], options, params)
+
+    def _customize(self, taskset, exp_params):
+        start = 1 if exp_params['release_master'] else 0
+        # Random partition for now: could do a smart partitioning
+        for t in taskset:
+            t.cpu = random.randint(start, exp_params['cpus'] - 1)
+
+class PedfGenerator(PartitionedGenerator):
+    def __init__(self, params={}):
+        super(PedfGenerator, self).__init__("P-EDF", [], [], params)
+
+class CedfGenerator(PartitionedGenerator):
+    LEVEL_OPTION = GenOption('level', ['L2', 'L3', 'All'], ['L2'],
+                             'Cache clustering level.',)
+
+    def __init__(self, params={}):
+        super(CedfGenerator, self).__init__("C-EDF", [TP_CLUSTER],
+                                            [CedfGenerator.LEVEL_OPTION], params)
+
+class GedfGenerator(BaseGenerator):
+    def __init__(self, params={}):
+        super(GedfGenerator, self).__init__("G-EDF", [TP_GLOB_TASK], [], params)
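
A rough sketch of driving a generator directly, outside gen_exps.py (illustrative only; it assumes Cheetah, schedcat and the run/ helpers are importable and that the parameter values suit the local machine). Note that create_exps() expects the output directory to exist already, as gen_exps.py creates it:

    import os
    from gen.generators import CedfGenerator

    params = {'num_tasks' : [4, 8],          # two design points
              'utils'     : ['uni-medium'],
              'duration'  : [10]}
    gen = CedfGenerator(params)
    gen.print_help()                         # show accepted options and defaults

    out_dir = 'example-exps'                 # hypothetical output directory
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    gen.create_exps(out_dir, True)           # one sched.py/params.py pair per point
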
diff --git a/gen/rv.py b/gen/rv.py
new file mode 100644
index 0000000..e6f4d0f
--- /dev/null
+++ b/gen/rv.py
@@ -0,0 +1,86 @@
+from __future__ import division
+import random
+
+def uniform_int(minval, maxval):
+    "Create a function that draws ints uniformly from {minval, ..., maxval}"
+    def _draw():
+        return random.randint(minval, maxval)
+    return _draw
+
+def uniform(minval, maxval):
+    "Create a function that draws floats uniformly from [minval, maxval]"
+    def _draw():
+        return random.uniform(minval, maxval)
+    return _draw
+
+def bernoulli(p):
+    "Create a function that flips a weighted coin with probability p"
+    def _draw():
+        return random.random() < p
+    return _draw
+
+def uniform_choice(choices):
+    "Create a function that draws elements uniformly from choices"
+    selector = uniform_int(0, len(choices) - 1)
+    def _draw():
+        return choices[selector()]
+    return _draw
+
+def truncate(minval, maxval):
+    def _limit(fun):
+        def _f(*args, **kargs):
+            val = fun(*args, **kargs)
+            return min(maxval, max(minval, val))
+        return _f
+    return _limit
+
+def redraw(minval, maxval):
+    def _redraw(dist):
+        def _f(*args, **kargs):
+            in_range = False
+            while not in_range:
+                val = dist(*args, **kargs)
+                in_range = minval <= val <= maxval
+            return val
+        return _f
+    return _redraw
+
+def exponential(minval, maxval, mean, limiter=redraw):
+    """Create a function that draws floats from an exponential
+    distribution with expected value 'mean'. If a drawn value is less
+    than minval or greater than maxval, then either another value is
+    drawn (if limiter=redraw) or the drawn value is set to minval or
+    maxval (if limiter=truncate)."""
+    def _draw():
+        return random.expovariate(1.0 / mean)
+    return limiter(minval, maxval)(_draw)
+
+def multimodal(weighted_distributions):
+    """Create a function that draws values from several distributions
+    with probability according to the given weights in a list of
+    (distribution, weight) pairs."""
+    total_weight = sum([w for (d, w) in weighted_distributions])
+    selector = uniform(0, total_weight)
+    def _draw():
+        x = selector()
+        wsum = 0
+        for (d, w) in weighted_distributions:
+            wsum += w
+            if wsum >= x:
+                return d()
+        assert False # should never drop off
+    return _draw
+
+def uniform_slack(min_slack_ratio, max_slack_ratio):
+    """Choose deadlines uniformly such that the deadline
+    is within [cost + min_slack_ratio * (period - cost),
+    cost + max_slack_ratio * (period - cost)].
+
+    Setting max_slack_ratio = 1 implies constrained deadlines.
+    """
+    def choose_deadline(cost, period):
+        slack = period - cost
+        earliest = slack * min_slack_ratio
+        latest = slack * max_slack_ratio
+        return cost + random.uniform(earliest, latest)
+    return choose_deadline
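
These helpers all return zero-argument draw functions, which is how gen/generators.py feeds them to schedcat's TaskGenerator as its period and util arguments. A small illustrative sketch:

    import gen.rv as rv

    # Utilizations: exponential with mean 0.25, clamped into [0, 1]
    udist = rv.exponential(0, 1, 0.25, limiter=rv.truncate)
    # Periods: the same choices used by the 'harmonic' named distribution
    pdist = rv.uniform_choice([25, 50, 100, 200])

    for _ in range(3):
        print("util=%.3f period=%d" % (udist(), pdist()))
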
diff --git a/gen_exps.py b/gen_exps.py
index e69de29..e4e8187 100644..100755
--- a/gen_exps.py
+++ b/gen_exps.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import os
+import re
+import shutil as sh
+
+from gen.generators import GedfGenerator,PedfGenerator,CedfGenerator
+from optparse import OptionParser
+
+# There has to be a better way to do this...
+GENERATORS = {'C-EDF':CedfGenerator,
+              'P-EDF':PedfGenerator,
+              'G-EDF':GedfGenerator}
+
+def parse_args():
+    parser = OptionParser("usage: %prog [options] [files...] "
+                          "[generators...] [param=val[,val]...]")
+
+    parser.add_option('-o', '--out-dir', dest='out_dir',
+                      help='directory for data output',
+                      default=("%s/exps"%os.getcwd()))
+    parser.add_option('-f', '--force', action='store_true', default=False,
+                      dest='force', help='overwrite existing data')
+    parser.add_option('-l', '--list-generators', dest='list_gens',
+                      help='list allowed generators', action='store_true',
+                      default=False)
+    parser.add_option('-d', '--describe-generators', metavar='generator[,..]',
+                      dest='described', default=None,
+                      help='describe parameters for generator(s)')
+
+    return parser.parse_args()
+
+def load_file(fname):
+    with open(fname, 'r') as f:
+        data = f.read().strip()
+    try:
+        values = eval(data)
+        if 'generator' not in values:
+            raise ValueError()
+        generator = values['generator']
+        del values['generator']
+        return generator, values
+    except:
+        raise IOError("Invalid generation file: %s" % fname)
+
+def main():
+    opts, args = parse_args()
+
+    # Print generator information on the command line
+    if opts.list_gens:
+        print(", ".join(GENERATORS.keys()))
+    if opts.described != None:
+        for generator in opts.described.split(','):
+            if generator not in GENERATORS:
+                print("No generator '%s'" % generator)
+            else:
+                GENERATORS[generator]().print_help()
+    if opts.list_gens or opts.described:
+        return 0
+
+    params = filter(lambda x : re.match("\w+=\w+", x), args)
+
+    # Ensure some generator is loaded
+    args = list(set(args) - set(params))
+    #TODO: get every loaded plugin, try and use that generator
+    args = args or ['C-EDF', 'G-EDF', 'P-EDF']
+
+    # Split into files to load, named generators
+    files = filter(os.path.exists, args)
+    gen_list = list(set(args) - set(files))
+
+    # Parse all specified parameters to be applied to every experiment
+    global_params = dict(map(lambda x : tuple(x.split("=")), params))
+    for k, v in global_params.iteritems():
+        global_params[k] = v.split(',')
+
+    exp_sets = map(load_file, files)
+    exp_sets += map(lambda x: (x, {}), gen_list)
+
+    if opts.force and os.path.exists(opts.out_dir):
+        sh.rmtree(opts.out_dir)
+    if not os.path.exists(opts.out_dir):
+        os.mkdir(opts.out_dir)
+
+    for gen_name, gen_params in exp_sets:
+        if gen_name not in GENERATORS:
+            raise ValueError("Invalid generator name: %s" % gen_name)
+
+        print("Creating experiments using %s generator..." % gen_name)
+
+        params = dict(gen_params.items() + global_params.items())
+        generator = GENERATORS[gen_name](params)
+
+        generator.create_exps(opts.out_dir, opts.force)
+
+if __name__ == '__main__':
+    main()
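
For reference, a generation file accepted by load_file() is a Python dictionary literal with a 'generator' key; the remaining keys are passed to that generator as parameters. A hypothetical example (file name and values are made up):

    # cedf-l3.py -- hypothetical generation file
    {'generator' : 'C-EDF',
     'level'     : ['L3'],
     'num_tasks' : [8, 16],
     'utils'     : ['uni-light', 'uni-medium']}

It could then be combined with command-line overrides, e.g. "./gen_exps.py -o exps cedf-l3.py duration=10,30"; param=val,val arguments apply to every generator and take precedence over values from the file.
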
diff --git a/parse/ft.py b/parse/ft.py
index a6596b7..5293b00 100644
--- a/parse/ft.py
+++ b/parse/ft.py
@@ -47,12 +47,12 @@ def parse_overhead(result, overhead_bin, overhead, cycles, out_dir, err_file):
 
 def sort_ft(ft_file, err_file, out_dir):
     '''Create and return file with sorted overheads from @ft_file.'''
-    out_fname = "{}/{}".format(out_dir, FT_SORTED_NAME)
+    out_fname = "{}/{}".format("%s/%s" % (os.getcwd(), out_dir), FT_SORTED_NAME)
 
     # Sort happens in-place
     sh.copyfile(ft_file, out_fname)
     cmd = [conf.BINS['ftsort'], out_fname]
-    ret = subprocess.call(cmd, cwd=out_dir, stderr=err_file, stdout=err_file)
+    ret = subprocess.call(cmd, cwd="%s/%s" % (os.getcwd(), out_dir), stderr=err_file, stdout=err_file)
 
     if ret:
         raise Exception("Sort failed with command: %s" % " ".join(cmd))
diff --git a/parse/sched.py b/parse/sched.py
index 512ac73..ba0df5e 100644
--- a/parse/sched.py
+++ b/parse/sched.py
@@ -146,7 +146,7 @@ def extract_sched_data(result, data_dir, work_dir):
     bin_files = conf.FILES['sched_data'].format(".*")
     output_file = "%s/out-st" % work_dir
 
-    bins = [f for f in os.listdir(data_dir) if re.match(bin_files, f)]
+    bins = ["%s/%s" % (data_dir,f) for f in os.listdir(data_dir) if re.match(bin_files, f)]
     if not len(bins):
         return
 
diff --git a/parse/tuple_table.py b/parse/tuple_table.py
index e5dc39b..86006d2 100644
--- a/parse/tuple_table.py
+++ b/parse/tuple_table.py
@@ -23,6 +23,15 @@ class ColMap(object):
             key += (kv[col],)
         return key
 
+    def get_encoding(self, kv):
+        def escape(val):
+            return str(val).replace("_", "-").replace("=", "-")
+        vals = []
+        for key in self.col_list:
+            k, v = escape(key), escape(kv[key])
+            vals += ["%s=%s" % (k, v)]
+        return "_".join(vals)
+
     def __contains__(self, col):
         return col in self.rev_map
 
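
A standalone sketch of the naming scheme get_encoding() produces (illustrative only; the real method walks ColMap's col_list, which try_add() populates so that only parameters that vary across experiments appear, rather than a sorted key list):

    def encode(kv):
        def escape(val):
            return str(val).replace("_", "-").replace("=", "-")
        return "_".join("%s=%s" % (escape(k), escape(kv[k])) for k in sorted(kv))

    print(encode({'num_tasks': 8, 'utils': 'uni-medium'}))
    # -> num-tasks=8_utils=uni-medium
    # gen/generators.py prefixes this with the scheduler, giving directory
    # names such as sched=C-EDF_num-tasks=8_utils=uni-medium
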
diff --git a/parse_exps.py b/parse_exps.py
index c8cd8b1..f7e1342 100755
--- a/parse_exps.py
+++ b/parse_exps.py
@@ -18,6 +18,8 @@ def parse_args():
     # TODO: convert data-dir to proper option, clean 'dest' options
     parser = OptionParser("usage: %prog [options] [data_dir]...")
 
+    print("default to no params.py")
+
     parser.add_option('-o', '--out', dest='out',
                       help='file or directory for data output', default='parse-data')
     parser.add_option('-c', '--clean', action='store_true', default=False,
@@ -41,19 +43,14 @@ def get_exp_params(data_dir, col_map):
     if not os.path.isfile:
         raise Exception("No param file '%s' exists!" % param_file)
 
-    # Ignore 'magic' parameters used by these scripts
     params = load_params(param_file)
-    for ignored in conf.PARAMS.itervalues():
-        # With the exception of cycles which is used by overhead parsing
-        if ignored in params and ignored != conf.PARAMS['cycles']:
-            params.pop(ignored)
 
     # Store parameters in col_map, which will track which parameters change
     # across experiments
     for key, value in params.iteritems():
         col_map.try_add(key, value)
 
-    # Cycles must be present
+    # Cycles must be present for feather-trace measurement parsing
     if conf.PARAMS['cycles'] not in params:
         params[conf.PARAMS['cycles']] = conf.DEFAULTS['cycles']
 
@@ -72,10 +69,10 @@ def load_exps(exp_dirs, col_map, clean):
         # Used to store error output and debugging info
         work_dir = data_dir + "/tmp"
 
+        if os.path.exists(work_dir) and clean:
+            sh.rmtree(work_dir)
         if not os.path.exists(work_dir):
             os.mkdir(work_dir)
-        elif clean:
-            sh.rmtree(work_dir)
 
         params = get_exp_params(data_dir, col_map)
 
diff --git a/run_exps.py b/run_exps.py
index cc348ec..8fd9ed2 100755
--- a/run_exps.py
+++ b/run_exps.py
@@ -117,6 +117,7 @@ def load_experiment(sched_file, scheduler, duration, param_file, out_dir):
     # Cycles is saved here for accurate overhead calculations later
     out_params = dict(params.items() +
                       [(conf.PARAMS['sched'], scheduler),
+                       (conf.PARAMS['tasks'], len(schedule['spin'])),
                        (conf.PARAMS['dur'], duration),
                        (conf.PARAMS['cycles'], lu.cpu_freq())])
     with open("%s/%s" % (out_dir, conf.DEFAULTS['params_file']), 'w') as f: