 experiment/experiment.py  |   3
 experiment/litmus_util.py |  13
 parse/ft.py               | 132
 parse_exps.py             |   9
 run_exps.py               |   6
 5 files changed, 125 insertions(+), 38 deletions(-)
diff --git a/experiment/experiment.py b/experiment/experiment.py
index 8c88799..deb4ff2 100644
--- a/experiment/experiment.py
+++ b/experiment/experiment.py
@@ -178,7 +178,8 @@ class Experiment(object):
         self.log("Writing %d proc entries" % len(self.proc_entries))
         map(methodcaller('write_proc'), self.proc_entries)
 
-        time.sleep(5)
+        if len(self.proc_entries):
+            time.sleep(2)
 
         self.log("Switching to %s" % self.scheduler)
         litmus_util.switch_scheduler(self.scheduler)
diff --git a/experiment/litmus_util.py b/experiment/litmus_util.py
index cde0bca..42d3e5f 100644
--- a/experiment/litmus_util.py
+++ b/experiment/litmus_util.py
@@ -17,6 +17,19 @@ def num_cpus():
         cpus += 1
     return cpus
 
+def cpu_freq():
+    """
+    The frequency (in MHz) of the CPU.
+    """
+    reg = re.compile(r'^cpu MHz\s*:\s*(\d+)', re.M)
+    with open('/proc/cpuinfo', 'r') as f:
+        data = f.read()
+
+    match = re.search(reg, data)
+    if not match:
+        raise Exception("Cannot parse CPU frequency!")
+    return int(match.group(1))
+
 def switch_scheduler(switch_to_in):
     """Switch the scheduler to whatever is passed in.
 
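The MHz value returned by cpu_freq() is what parse/ft.py later divides raw cycle counts by, turning Feather-Trace samples into microseconds. A minimal standalone sketch of that conversion, using hypothetical numbers (a 2000 MHz clock and made-up cycle counts):

# Editor's sketch (hypothetical numbers): converting Feather-Trace cycle
# counts to microseconds with the MHz value from cpu_freq().
freq_mhz = 2000                      # e.g. cpu_freq() on a 2 GHz machine
raw_cycles = [5200, 7400, 6100]      # made-up event lengths in CPU cycles

# cycles / MHz = microseconds, mirroring "data /= float(cycles)" in parse/ft.py
overheads_us = [c / float(freq_mhz) for c in raw_cycles]
print(overheads_us)                  # [2.6, 3.7, 3.05]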
diff --git a/parse/ft.py b/parse/ft.py
index 4e310b0..cbf75f2 100644
--- a/parse/ft.py
+++ b/parse/ft.py
@@ -1,4 +1,5 @@
 import config.config as conf
+import numpy as np
 import os
 import re
 import shutil as sh
@@ -6,14 +7,17 @@ import subprocess
 
 from point import Measurement,Type
 
-def get_ft_output(data_dir, out_dir, force=False):
+SPLIT_DATA_NAME = "overhead={}.bin"
+FT_DATA_NAME = "sorted-ft.bin"
+FIELDS = ["Overhead", "samples", "max", "avg", "min", "med", "std", "var"]
+
+def get_ft_output(data_dir, cycles, out_dir, force=False):
     """
-    Create and return files containing sorted and analyzed overhead data
+    Create and return file containing analyzed overhead data
     """
-    bin_file = conf.FILES['ft_data'] + "$"
-    bins = [f for f in os.listdir(data_dir) if re.match(bin_file, f)]
+    freg = conf.FILES['ft_data'] + "$"
+    bins = [f for f in os.listdir(data_dir) if re.match(freg, f)]
 
-    FT_DATA_NAME = "scheduler=x-ft"
     output_file = "{}/out-ft".format(out_dir)
 
     if os.path.isfile(output_file):
@@ -23,44 +27,106 @@ def get_ft_output(data_dir, out_dir, force=False):
         return output_file
 
     if len(bins) != 0:
+        bin_file = "{}/{}".format(data_dir, bins[0])
         err_file = open("%s/err-ft" % out_dir, 'w')
-        # Need to make a copy of the original data file so scripts can change it
-        sh.copyfile("{}/{}".format(data_dir, bins[0]),
-                    "{}/{}".format(out_dir, FT_DATA_NAME))
 
-        subprocess.call([conf.BINS['sort'], FT_DATA_NAME],
-                        cwd=out_dir, stderr=err_file, stdout=err_file)
-        subprocess.call([conf.BINS['split'], FT_DATA_NAME],
-                        cwd=out_dir, stderr=err_file, stdout=err_file)
-
-        # Previous subprocesses just spit out all these intermediate files
-        bins = [f for f in os.listdir(out_dir) if re.match(".*overhead=.*bin", f)]
-        bins = [f for f in bins if os.stat("%s/%s"%(out_dir, f)).st_size]
+        sorted_bin = sort_ft(bin_file, err_file, out_dir)
+        make_data_file(sorted_bin, cycles, output_file, err_file, out_dir)
 
-        # Analyze will summarize those
-        # todo pass in f
-        cmd_arr = [conf.BINS['analyze']]
-        cmd_arr.extend(bins)
-        with open(output_file, "w") as f:
-            subprocess.call(cmd_arr, cwd=out_dir, stdout=f, stderr=err_file)
+        os.remove(sorted_bin)
+
+        return output_file
     else:
         return None
     return output_file
 
-def extract_ft_data(data_file, result, overheads):
-    rstr = r",(?:\s+[^\s]+){3}.*?([\d\.]+).*?([\d\.]+),(?:\s+[^\s]+){3}.*?([\d\.]+)"
-
+def fmt_cell(x):
+    if type(x) == str:
+        return "%15s" % x
+    if type(x) == int:
+        return "%15d" % x
+    else:
+        return "%15.3f" % x
+
+def make_data_file(sorted_bin, cycles, out_fname, err_file, out_dir):
+    """
+    Create file containing all overhead information.
+    """
+    base_name = "{}/{}".format(out_dir, SPLIT_DATA_NAME)
+
+    with open(out_fname, "w") as f:
+        f.write("#%s" % ", ".join(fmt_cell(x) for x in FIELDS))
+        f.write("\n")
+
+        for event in conf.BASE_EVENTS:
+            ovh_fname = base_name.format(event.replace("_", "-"))
+
+            if os.path.exists(ovh_fname):
+                os.remove(ovh_fname)
+            ovh_file = open(ovh_fname, 'w')
+
+            # Extract matching overhead events into a seperate file
+            cmd = [conf.BINS["split"], "-r", "-b", event, sorted_bin]
+            ret = subprocess.call(cmd, cwd=out_dir,
+                                  stderr=err_file, stdout=ovh_file)
+            size = os.stat(ovh_fname).st_size
+
+            if ret:
+                err_file.write("Failed with command: %s" % " ".join(cmd))
+            if not size:
+                os.remove(ovh_fname)
+            if not size or ret:
+                continue
+
+            # Map and sort file for stats
+            data = np.memmap(ovh_fname, dtype="float32", mode='c')
+            data /= float(cycles) # Scale for processor speed
+            data.sort()
+
+            stats = [event, len(data), data[-1], np.mean(data), data[0],
+                     np.median(data), np.std(data, ddof=1), np.var(data)]
+            f.write(", ".join([fmt_cell(x) for x in stats]))
+            f.write("\n")
+
+            os.remove(ovh_fname)
+
+def sort_ft(ft_file, err_file, out_dir):
+    """
+    Create and return file with sorted overheads from @ft_file.
+    """
+    out_fname = "{}/{}".format(out_dir, FT_DATA_NAME)
+
+    # Sort happens in-place
+    sh.copyfile(ft_file, out_fname)
+    cmd = [conf.BINS['ftsort'], out_fname]
+    ret = subprocess.call(cmd, cwd=out_dir, stderr=err_file, stdout=err_file)
+
+    if ret:
+        raise Exception("Sort failed with command: %s" % " ".join(cmd))
+
+    return out_fname
+
+def extract_ft_data(data_file, result, overheads):
+    """
+    Return exp point with overhead measurements from data_file
+    """
     with open(data_file) as f:
         data = f.read()
 
     for ovh in overheads:
+        regex = r"({}[^\n]*)".format(ovh)
+        line = re.search(regex, data)
+
+        if not line:
+            continue
+
+        vals = re.split(r"[,\s]+", line.groups(1)[0])
+
         measure = Measurement("%s-%s" % (data_file, ovh))
-        vals = re.findall(r"\s+{}".format(ovh.replace('_','-')) + rstr, data);
-        if len(vals) != 0:
-            vals = vals[0]
-            measure[Type.Max] = float(vals[0])
-            measure[Type.Avg] = float(vals[1])
-            measure[Type.Var] = float(vals[2])
-            result[ovh] = measure
+        measure[Type.Max] = float(vals[FIELDS.index("max")])
+        measure[Type.Avg] = float(vals[FIELDS.index("avg")])
+        measure[Type.Var] = float(vals[FIELDS.index("var")])
+
+        result[ovh] = measure
 
     return result
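The new make_data_file() writes one fixed-width, comma-separated row of statistics per overhead event under the FIELDS header, and extract_ft_data() finds a row by event name and splits it back apart. A standalone sketch of that round trip, using a hypothetical event name ("CXS") and made-up statistics:

# Editor's sketch (hypothetical values) of the row format written by
# make_data_file() and re-parsed by extract_ft_data().
import re

FIELDS = ["Overhead", "samples", "max", "avg", "min", "med", "std", "var"]

def fmt_cell(x):
    if type(x) == str:
        return "%15s" % x
    if type(x) == int:
        return "%15d" % x
    else:
        return "%15.3f" % x

# One row of the out-ft file: the overhead name followed by its statistics
stats = ["CXS", 1000, 12.5, 3.2, 0.4, 2.9, 1.1, 1.21]
row = ", ".join(fmt_cell(x) for x in stats)

# extract_ft_data() locates the row by name and splits on commas/whitespace
vals = re.split(r"[,\s]+", row.strip())
print(vals[FIELDS.index("max")], vals[FIELDS.index("avg")], vals[FIELDS.index("var")])
# -> 12.500 3.200 1.210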
diff --git a/parse_exps.py b/parse_exps.py
index 87d0783..24bdb85 100755
--- a/parse_exps.py
+++ b/parse_exps.py
@@ -52,13 +52,17 @@ def get_exp_params(data_dir, col_map):
     # Keep only params that uniquely identify the experiment
     params = load_params(param_file)
     for ignored in conf.PARAMS.itervalues():
-        if ignored in params:
+        # Always include cycles or overhead parsing fails
+        if ignored in params and ignored != conf.PARAMS['cycles']:
             params.pop(ignored)
 
     # Track all changed params
     for key, value in params.iteritems():
         col_map.try_add(key, value)
 
+    if conf.PARAMS['cycles'] not in params:
+        params[conf.PARAMS['cycles']] = conf.DEFAULTS['cycles']
+
     return params
 
 
@@ -78,8 +82,9 @@ def gen_exp_data(exp_dirs, base_conf, col_map, force):
 
         # Read and translate exp output files
         params = get_exp_params(data_dir, col_map)
+        cycles = int(params[conf.PARAMS['cycles']])
         st_output = st.get_st_output(data_dir, tmp_dir, force)
-        ft_output = ft.get_ft_output(data_dir, tmp_dir, force)
+        ft_output = ft.get_ft_output(data_dir, cycles, tmp_dir, force)
 
 
         if base_conf and base_conf.viewitems() & params.viewitems():
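Together these two hunks keep the cycles parameter through the filtering in get_exp_params() and fall back to conf.DEFAULTS['cycles'] for older result directories that never recorded it. A standalone sketch of that defaulting logic, with hypothetical key names and a hypothetical 2000 MHz default:

# Editor's sketch (hypothetical keys and default): how 'cycles' survives the
# parameter filtering and is filled in when an old params file lacks it.
PARAMS = {'sched': 'scheduler', 'dur': 'duration', 'cycles': 'cycles'}
DEFAULTS = {'cycles': 2000}    # assumed default clock speed, in MHz

def with_cycles(params):
    # Drop run-time-only params, but never drop 'cycles'
    kept = {k: v for k, v in params.items()
            if k not in PARAMS.values() or k == PARAMS['cycles']}
    # Older experiments did not record 'cycles'; assume a default
    kept.setdefault(PARAMS['cycles'], DEFAULTS['cycles'])
    return kept

print(with_cycles({'scheduler': 'GSN-EDF', 'duration': 30}))   # {'cycles': 2000}
print(with_cycles({'scheduler': 'GSN-EDF', 'cycles': 2394}))   # {'cycles': 2394}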
diff --git a/run_exps.py b/run_exps.py
index 8812bc6..3efb09d 100755
--- a/run_exps.py
+++ b/run_exps.py
@@ -114,9 +114,11 @@ def load_experiment(sched_file, scheduler, duration, param_file, out_dir):
     run_exp(sched_file, schedule, scheduler, kernel, duration, work_dir, out_dir)
 
     # Save parameters used to run experiment in out_dir
+    # Cycles is saved here for accurate overhead calculations later
     out_params = dict(params.items() +
                       [(conf.PARAMS['sched'], scheduler),
-                       (conf.PARAMS['dur'], duration)])
+                       (conf.PARAMS['dur'], duration),
+                       (conf.PARAMS['cycles'], lu.cpu_freq())])
     with open("%s/%s" % (out_dir, conf.DEFAULTS['params_file']), 'w') as f:
         f.write(str(out_params))
 
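The params file is simply str() of a dict, so the recorded CPU frequency rides along with the other experiment parameters. A sketch of what the saved content could look like and how it can be read back, with hypothetical key names and values (how load_params actually parses the file is not shown in this diff):

# Editor's sketch (hypothetical keys/values): round-tripping the params file
# that run_exps.py writes with str(out_params).
from ast import literal_eval

out_params = {'scheduler': 'GSN-EDF', 'duration': 30, 'cycles': 2394}
serialized = str(out_params)
print(serialized)
# e.g. {'scheduler': 'GSN-EDF', 'duration': 30, 'cycles': 2394} (key order may vary)

restored = literal_eval(serialized)
print(restored['cycles'])   # 2394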