-rw-r--r--  experiment/executable/executable.py |   2
-rw-r--r--  experiment/experiment.py            |  53
-rw-r--r--  experiment/tracer.py                |   3
-rw-r--r--  parse/sched.py                      | 125
-rw-r--r--  parse/tuple_table.py                |   3
-rwxr-xr-x  parse_exps.py                       |   5
-rwxr-xr-x  run_exps.py                         |  52
7 files changed, 173 insertions, 70 deletions
diff --git a/experiment/executable/executable.py b/experiment/executable/executable.py
index 897c2d9..09b7370 100644
--- a/experiment/executable/executable.py
+++ b/experiment/executable/executable.py
@@ -71,4 +71,4 @@ class Executable(object):
 
         self.sp.wait()
         if self.sp.returncode != 0:
-            print >>sys.stderr, "Non-zero return: %s %s" % (self.exec_file, self.extra_args)
+            print >>sys.stderr, "Non-zero return: %s %s" % (self.exec_file, " ".join(self.extra_args))
diff --git a/experiment/experiment.py b/experiment/experiment.py
index a95ca42..a44f798 100644
--- a/experiment/experiment.py
+++ b/experiment/experiment.py
@@ -91,16 +91,16 @@ class Experiment(object):
 
     def __run_tasks(self):
         exec_pause = 0.3
-        self.log("Starting the program in ({0} seconds)".format(
+        self.log("Starting the programs over ({0} seconds)".format(
            len(self.executables) * exec_pause))
         for e in self.executables:
             try:
                 e.execute()
             except:
                 raise Exception("Executable failed: %s" % e)
             time.sleep(exec_pause)
 
-        sleep_time = 2
+        sleep_time = len(self.executables) / litmus_util.num_cpus()
         self.log("Sleeping for %d seconds before release" % sleep_time)
         time.sleep(sleep_time)
 
@@ -117,13 +117,18 @@ class Experiment(object):
         if released != len(self.executables):
             # Some tasks failed to release, kill all tasks and fail
             # Need to re-release non-released tasks before we can kill them though
-            self.log("Failed to release %d tasks! Re-releasing and killing".format(
-                len(self.experiments) - released))
+            self.log("Failed to release {} of {} tasks! Re-releasing and killing".format(
+                len(self.executables) - released, len(self.executables)))
 
-            time.sleep(10)
-            litmus_util.release_tasks()
+            time.sleep(5)
 
-            time.sleep(20)
+            released = litmus_util.release_tasks()
+
+            self.log("Re-released %d tasks" % released)
+
+            time.sleep(5)
+
+            self.log("Killing all tasks")
             map(methodcaller('kill'), self.executables)
 
             ret = False
@@ -147,23 +152,46 @@ class Experiment(object):
 
     def run_exp(self):
         self.setup()
+
+        succ = False
+
         try:
             self.__run_tasks()
+            self.log("Saving results in %s" % self.finished_dir)
+            succ = True
         finally:
             self.teardown()
 
-    def setup(self):
-        self.log("Switching to %s" % self.scheduler)
-        litmus_util.switch_scheduler(self.scheduler)
+        if succ:
+            self.__save_results()
+            self.log("Experiment done!")
 
+
+    def setup(self):
         self.log("Writing %d proc entries" % len(self.proc_entries))
         map(methodcaller('write_proc'), self.proc_entries)
 
+        time.sleep(5)
+
+        self.log("Switching to %s" % self.scheduler)
+        litmus_util.switch_scheduler(self.scheduler)
+
         self.log("Starting %d tracers" % len(self.tracers))
         map(methodcaller('start_tracing'), self.tracers)
+
+        self.exec_out = open('%s/exec-out.txt' % self.working_dir, 'w')
+        self.exec_err = open('%s/exec-err.txt' % self.working_dir, 'w')
+        def set_out(executable):
+            executable.stdout_file = self.exec_out
+            executable.stderr_file = self.exec_err
+        map(set_out, self.executables)
+
         time.sleep(4)
 
     def teardown(self):
+        self.exec_out.close()
+        self.exec_err.close()
+
         sleep_time = 5
         self.log("Sleeping %d seconds to allow buffer flushing" % sleep_time)
         time.sleep(sleep_time)
@@ -174,6 +202,3 @@ class Experiment(object):
         self.log("Switching to Linux scheduler")
         litmus_util.switch_scheduler("Linux")
 
-        self.log("Saving results in %s" % self.finished_dir)
-        self.__save_results()
-        self.log("Experiment done!")
diff --git a/experiment/tracer.py b/experiment/tracer.py
index ad4ebfe..4949927 100644
--- a/experiment/tracer.py
+++ b/experiment/tracer.py
@@ -27,13 +27,14 @@ class LinuxTracer(Tracer):
     def __init__(self, output_dir):
         super(LinuxTracer, self).__init__("trace-cmd", output_dir)
 
-        extra_args = ["record", "-e", "sched:sched_switch",
+        extra_args = ["record", # "-e", "sched:sched_switch",
                       "-e", "litmus:*",
                       "-o", "%s/%s" % (output_dir, conf.FILES['linux_data'])]
         stdout = open('%s/trace-cmd-stdout.txt' % self.output_dir, 'w')
         stderr = open('%s/trace-cmd-stderr.txt' % self.output_dir, 'w')
 
         execute = Executable(conf.BINS['trace-cmd'], extra_args, stdout, stderr)
+        execute.cwd = output_dir
         self.bins.append(execute)
 
     @staticmethod
diff --git a/parse/sched.py b/parse/sched.py
index 5e3ba6b..b84e16e 100644
--- a/parse/sched.py
+++ b/parse/sched.py
@@ -1,6 +1,5 @@
 """
-TODO: make regexes indexable by name
-
+TODO: No longer very pythonic, lot of duplicate code
 """
 
 import config.config as conf
@@ -12,7 +11,27 @@ import subprocess
 from collections import namedtuple,defaultdict
 from point import Measurement,Type
 
-TaskConfig = namedtuple('TaskConfig', ['cpu','wcet','period'])
+PARAM_RECORD = r"(?P<RECORD>" +\
+               r"PARAM *?(?P<PID>\d+)\/.*?" +\
+               r"cost:\s+(?P<WCET>[\d\.]+)ms.*?" +\
+               r"period.*?(?P<PERIOD>[\d.]+)ms.*?" +\
+               r"part.*?(?P<CPU>\d+)[, ]*" +\
+               r"(?:class=(?P<CLASS>\w+))?[, ]*" +\
+               r"(?:level=(?P<LEVEL>\w+))?).*$"
+EXIT_RECORD = r"(?P<RECORD>" +\
+              r"TASK_EXIT *?(?P<PID>\d+)/.*?" +\
+              r"Avg.*?(?P<AVG>\d+).*?" +\
+              r"Max.*?(?P<MAX>\d+))"
+TARDY_RECORD = r"(?P<RECORD>" +\
+               r"TARDY.*?(?P<PID>\d+)/(?P<JOB>\d+).*?" +\
+               r"Tot.*?(?P<TOTAL>[\d\.]+).*?ms.*?" +\
+               r"(?P<MAX>[\d\.]+).*?ms.*?" +\
+               r"(?P<MISSES>[\d\.]+))"
+COMPLETION_RECORD = r"(?P<RECORD>" +\
+                    r"COMPLETION.*?(?P<PID>\d+)/.*?" +\
+                    r"(?P<EXEC>[\d\.]+)ms)"
+
+TaskConfig = namedtuple('TaskConfig', ['cpu','wcet','period','type','level'])
 Task = namedtuple('Task', ['pid', 'config'])
 
 def get_st_output(data_dir, out_dir):
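Note: these named-group patterns are applied to the raw trace text with re.finditer(). A minimal, self-contained sketch of how PARAM_RECORD is meant to be used; the sample record line below is made up and only assumed to resemble the real trace output:

    import re

    PARAM_RECORD = (r"(?P<RECORD>PARAM *?(?P<PID>\d+)\/.*?"
                    r"cost:\s+(?P<WCET>[\d\.]+)ms.*?"
                    r"period.*?(?P<PERIOD>[\d.]+)ms.*?"
                    r"part.*?(?P<CPU>\d+)[, ]*"
                    r"(?:class=(?P<CLASS>\w+))?[, ]*"
                    r"(?:level=(?P<LEVEL>\w+))?).*$")

    # Hypothetical record line, shaped only to satisfy the pattern above
    data = "PARAM 1234/1234: cost: 10.0ms; period 100.0ms; part 2, class=rt, level=b"

    for m in re.finditer(PARAM_RECORD, data, re.M):
        print m.group('PID'), m.group('WCET'), m.group('PERIOD'), m.group('CPU')
        # -> 1234 10.0 100.0 2
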
@@ -34,18 +53,43 @@ def get_st_output(data_dir, out_dir):
     return output_file
 
 def get_tasks(data):
-    reg = r"PARAM *?(\d+)\/.*?cost:\s+([\d\.]+)ms.*?period.*?([\d.]+)ms.*?part.*?(\d+)"
     ret = []
-    for match in re.findall(reg, data):
-        t = Task(match[0], TaskConfig(match[3],match[1],match[2]))
-        ret += [t]
+    for match in re.finditer(PARAM_RECORD, data, re.M):
+        try:
+            t = Task( int(match.group('PID')),
+                      TaskConfig( int(match.group('CPU')),
+                                  float(match.group('WCET')),
+                                  float(match.group('PERIOD')),
+                                  match.group("CLASS"),
+                                  match.group("LEVEL")))
+            if not (t.config.period and t.pid):
+                raise Exception()
+            ret += [t]
+        except Exception as e:
+            raise Exception("Invalid task record: %s\nparsed:\n\t%s\n\t%s" %
+                            (e, match.groupdict(), match.group('RECORD')))
     return ret
 
+def get_tasks_dict(data):
+    tasks_list = get_tasks(data)
+    tasks_dict = {}
+    for t in tasks_list:
+        tasks_dict[t.pid] = t
+    return tasks_dict
+
 def get_task_exits(data):
-    reg = r"TASK_EXIT *?(\d+)/.*?Avg.*?(\d+).*?Max.*?(\d+)"
     ret = []
-    for match in re.findall(reg, data):
-        m = Measurement(match[0], {Type.Max : match[2], Type.Avg : match[1]})
+    for match in re.finditer(EXIT_RECORD, data):
+        try:
+            m = Measurement( int(match.group('PID')),
+                             {Type.Max : float(match.group('MAX')),
+                              Type.Avg : float(match.group('AVG'))})
+            for (type, value) in m:
+                if not value: raise Exception()
+        except:
+            raise Exception("Invalid exit record, parsed:\n\t%s\n\t%s" %
+                            (match.groupdict(), match.group('RECORD')))
+
         ret += [m]
     return ret
 
@@ -55,40 +99,51 @@ def extract_tardy_vals(data, exp_point):
     avg_tards = []
     max_tards = []
 
-    for t in get_tasks(data):
-        reg = r"TARDY.*?" + t.pid + "/(\d+).*?Tot.*?([\d\.]+).*?ms.*?([\d\.]+).*?ms.*?([\d\.]+)"
-        matches = re.findall(reg, data)
-        if len(matches) != 0:
-            jobs = float(matches[0][0])
+    tasks = get_tasks_dict(data)
 
-            total_tard = float(matches[0][1])
-            avg_tard = (total_tard / jobs) / float(t.config.period)
-            max_tard = float(matches[0][2]) / float(t.config.period)
+    for match in re.finditer(TARDY_RECORD, data):
+        try:
+            pid = int(match.group("PID"))
+            jobs = int(match.group("JOB"))
+            misses = int(match.group("MISSES"))
+            total_tard = float(match.group("TOTAL"))
+            max_tard = float(match.group("MAX"))
 
-            misses = float(matches[0][3])
-            if misses != 0:
-                miss_ratio = (misses / jobs)
-            else:
-                miss_ratio = 0
+            if not (jobs and pid): raise Exception()
+        except:
+            raise Exception("Invalid tardy record:\n\t%s\n\t%s" %
+                            (match.groupdict(), match.group("RECORD")))
 
-            ratios += [miss_ratio]
-            avg_tards += [avg_tard]
-            max_tards += [max_tard]
+        if pid not in tasks:
+            raise Exception("Invalid pid '%d' in tardy record:\n\t%s" %
+                            (pid, match.group("RECORD")))
+
+        t = tasks[pid]
+        avg_tards += [ total_tard / (jobs * t.config.period) ]
+        max_tards += [ max_tard / t.config.period ]
+        ratios += [ float(misses) / jobs ]
 
     exp_point["avg-rel-tard"] = Measurement().from_array(avg_tards)
     exp_point["max-rel-tard"] = Measurement().from_array(max_tards)
     exp_point["miss-ratio"] = Measurement().from_array(ratios)
 
 def extract_variance(data, exp_point):
     varz = []
-    for t in get_tasks(data):
-        reg = r"COMPLETION.*?" + t.pid + r".*?([\d\.]+)ms"
-        matches = re.findall(reg, data)
+    completions = defaultdict(lambda: [])
+
+    for match in re.finditer(COMPLETION_RECORD, data):
+        try:
+            pid = int(match.group("PID"))
+            duration = float(match.group("EXEC"))
 
-        if len(matches) == 0:
-            return 0
+            if not (duration and pid): raise Exception()
+        except:
+            raise Exception("Invalid completion record:\n\t%s\n\t%s" %
+                            (match.groupdict(), match.group("RECORD")))
+        completions[pid] += [duration]
 
-        job_times = np.array(filter(lambda x: float(x) != 0, matches), dtype=np.float)
+    for (pid, durations) in completions.iteritems():
+        job_times = np.array(durations)
 
         # Coefficient of variation
         cv = job_times.std() / job_times.mean()
@@ -127,6 +182,10 @@ def config_exit_stats(file):
         task_list = sorted(config_dict[config])
 
         # Replace tasks with corresponding exit stats
+        for t in task_list:
+            if t.pid not in exit_dict:
+                raise Exception("Missing exit record for task '%s' in '%s'" %
+                                (t, file))
         exit_list = [exit_dict[t.pid] for t in task_list]
         config_dict[config] = exit_list
 
diff --git a/parse/tuple_table.py b/parse/tuple_table.py
index b56fa6c..6363b80 100644
--- a/parse/tuple_table.py
+++ b/parse/tuple_table.py
@@ -27,6 +27,9 @@ class ColMap(object):
 
         return key
 
+    def __contains__(self, col):
+        return col in self.rev_map
+
     def get_map(self, tuple):
         map = {}
         for i in range(0, len(tuple)):
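Note: __contains__ is the hook behind Python's `in` operator, which is what lets the new check in parse_exps.py below ask whether a column name is in a ColMap instance. A tiny sketch of the mechanism; apart from rev_map and __contains__, the class body here is a simplified stand-in, and force_add() is a hypothetical helper for this sketch only:

    class ColMap(object):
        def __init__(self):
            self.rev_map = {}            # column name -> index (simplified)

        def force_add(self, col):        # hypothetical helper, not from the diff
            self.rev_map[col] = len(self.rev_map)

        def __contains__(self, col):
            return col in self.rev_map

    cmap = ColMap()
    cmap.force_add("scheduler")
    print "scheduler" in cmap    # True
    print "base" in cmap         # False
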
diff --git a/parse_exps.py b/parse_exps.py
index 3a1d1b9..c91a654 100755
--- a/parse_exps.py
+++ b/parse_exps.py
@@ -90,6 +90,10 @@ def main():
 
     (plain_exps, scaling_bases) = gen_exp_data(args, base_conf, col_map)
 
+    if base_conf and base_conf.keys()[0] not in col_map:
+        raise IOError("Base column '%s' not present in any parameters!" %
+                      base_conf.keys()[0])
+
     base_table = TupleTable(col_map)
     result_table = TupleTable(col_map)
 
@@ -105,6 +109,7 @@ def main():
             ft.extract_ft_data(exp.data_files.ft, result, conf.BASE_EVENTS)
 
         if exp.data_files.st:
+            base = None
             if base_conf:
                 # Try to find a scaling base
                 base_params = copy.deepcopy(exp.params)
diff --git a/run_exps.py b/run_exps.py
index 4484952..825ad5b 100755
--- a/run_exps.py
+++ b/run_exps.py
@@ -38,13 +38,13 @@ def parse_args():
 def convert_data(data):
     """Convert a non-python schedule file into the python format"""
     regex = re.compile(
-
-        r"(?P<PROC>^"
-        r"(?P<HEADER>/proc/\w+?/)?"
-        r"(?P<ENTRY>[\w\/]+)"
-        r"\s*{\s*(?P<CONTENT>.*?)\s*?}$)|"
-        r"(?P<SPIN>^(?P<TYPE>\w+?spin)?\s*?"
-        r"(?P<ARGS>\w[\s\w]*?)?\s*?$)",
+        r"(?P<PROC>^"
+        r"(?P<HEADER>/proc/\w+?/)?"
+        r"(?P<ENTRY>[\w\/]+)"
+        r"\s*{\s*(?P<CONTENT>.*?)\s*?}$)|"
+        r"(?P<SPIN>^"
+        r"(?P<TYPE>\w+?spin)?\s+"
+        r"(?P<ARGS>[\w\-_\d\. ]+)\s*$)",
         re.S|re.I|re.M)
 
     procs = []
@@ -63,6 +63,15 @@ def convert_data(data):
 
     return {'proc' : procs, 'spin' : spins}
 
+def fix_paths(schedule, exp_dir):
+    for (idx, (spin, args)) in enumerate(schedule['spin']):
+        # Replace relative paths (if present) with absolute ones
+        for arg in args.split(" "):
+            abspath = "%s/%s" % (exp_dir, arg)
+            if os.path.exists(abspath):
+                args = args.replace(arg, abspath)
+
+        schedule['spin'][idx] = (spin, args)
 
 def get_dirs(sched_file, out_base_dir):
     sched_leaf_dir = re.findall(r".*/([\w_-]+)/.*?$", sched_file)[0]
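Note: a minimal, runnable sketch of what fix_paths() does to a parsed schedule; the 'rtspin' entry and its argument layout are made up for illustration:

    import os

    def fix_paths(schedule, exp_dir):
        for (idx, (spin, args)) in enumerate(schedule['spin']):
            # Replace relative paths (if present) with absolute ones
            for arg in args.split(" "):
                abspath = "%s/%s" % (exp_dir, arg)
                if os.path.exists(abspath):
                    args = args.replace(arg, abspath)
            schedule['spin'][idx] = (spin, args)

    schedule = {'spin': [('rtspin', '10 100 my_data.csv')]}
    fix_paths(schedule, "/path/to/exp_dir")
    # If /path/to/exp_dir/my_data.csv exists, the entry becomes
    # ('rtspin', '10 100 /path/to/exp_dir/my_data.csv'); otherwise it is unchanged.
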
@@ -88,27 +97,27 @@ def load_experiment(sched_file, scheduler, duration, param_file, out_base):
     params = {}
     kernel = ""
 
-    if not scheduler or not duration:
-        param_file = param_file or \
-            "%s/%s" % (dirname, conf.DEFAULTS['params_file'])
-
-        if os.path.isfile(param_file):
-            params = load_params(param_file)
-            scheduler = scheduler or params[conf.PARAMS['sched']]
-            duration = duration or params[conf.PARAMS['dur']]
-
-        # Experiments can specify required kernel name
-        if conf.PARAMS['kernel'] in params:
-            kernel = params[conf.PARAMS['kernel']]
-
+    param_file = param_file or \
+        "%s/%s" % (dirname, conf.DEFAULTS['params_file'])
+
+    if os.path.isfile(param_file):
+        params = load_params(param_file)
+        scheduler = scheduler or params[conf.PARAMS['sched']]
+        duration = duration or params[conf.PARAMS['dur']]
+
+        # Experiments can specify required kernel name
+        if conf.PARAMS['kernel'] in params:
+            kernel = params[conf.PARAMS['kernel']]
+
     duration = duration or conf.DEFAULTS['duration']
 
     if not scheduler:
         raise IOError("Parameter scheduler not specified in %s" % (param_file))
 
     # Parse schedule file's intentions
     schedule = load_schedule(sched_file)
     (work_dir, out_dir) = get_dirs(sched_file, out_base)
+    fix_paths(schedule, os.path.split(sched_file)[0])
 
     run_exp(sched_file, schedule, scheduler, kernel, duration, work_dir, out_dir)
 
@@ -170,8 +179,9 @@ def run_exp(name, schedule, scheduler, kernel, duration, work_dir, out_dir):
 
     exp = Experiment(name, scheduler, work_dir, out_dir,
                      proc_entries, executables)
-    exp.run_exp()
 
+    exp.run_exp()
+
 
 def main():
     opts, args = parse_args()