author    Glenn Elliott <gelliott@cs.unc.edu>  2014-01-15 16:07:13 -0500
committer Glenn Elliott <gelliott@cs.unc.edu>  2014-01-15 16:07:13 -0500
commit    9ea7ba5fc2c9531d66cd2929e215090487e7d862 (patch)
tree      ddbb9a5a6876f44a76577a419ad5ddd00e2266ae
parent    f3106e83c7404e9de96117770f210cf6a207cc2d (diff)
Clean up PGM task set generation parameters.
Work originally by Namhoon.
-rw-r--r--  gen/edf_generators.py |  75
-rw-r--r--  gen/generator.py      | 121
-rwxr-xr-x  gen_pgm_exps.py       |  12
3 files changed, 123 insertions(+), 85 deletions(-)
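Note: the main behavioral change in this commit is that bin-packing failures no longer abort task-set generation. Partitioning is wrapped in a try/except for schedcat's DidNotFit, _customize and _create_exp now return a success flag alongside their results, and the driver loop prints a message and removes the freshly created experiment directory when packing fails. The snippet below is a minimal, self-contained sketch of that flow, not the generator code itself; first_fit(), customize(), and create_exp() are simplified stand-ins for the real schedcat/ecrts14 routines.

# Minimal sketch of the DidNotFit handling introduced by this commit.
# first_fit(), customize(), and create_exp() are toy stand-ins, not the
# actual schedcat/ecrts14 code; only the control flow mirrors the diff.
import os
import tempfile

class DidNotFit(Exception):
    """Raised when a task cannot be placed on any cluster."""

def first_fit(utils, nr_clusters, capacity=1.0):
    """Toy first-fit bin-packing over per-task utilizations."""
    bins = [0.0] * nr_clusters
    for u in utils:
        for i, load in enumerate(bins):
            if load + u <= capacity:
                bins[i] += u
                break
        else:
            raise DidNotFit(u)
    return bins

def customize(utils, nr_clusters):
    # Analogue of _customize(): report failure via a flag instead of raising.
    try:
        first_fit(utils, nr_clusters)
    except DidNotFit:
        return False, utils
    return True, utils

def create_exp(dp, utils, dir_path):
    # Analogue of _create_exp(): only write output files on success.
    ok, utils = customize(utils, dp['nr_clusters'])
    if ok:
        with open(os.path.join(dir_path, 'params.py'), 'w') as f:
            f.write(repr({'num_tasks': len(utils)}))
    return ok

if __name__ == '__main__':
    dir_path = tempfile.mkdtemp()
    if not create_exp({'nr_clusters': 2}, [0.8, 0.7, 0.9], dir_path):
        print("Bin-packing fails for " + dir_path)
        os.rmdir(dir_path)   # discard the empty experiment directory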
diff --git a/gen/edf_generators.py b/gen/edf_generators.py
index 58cbb0d..d9e8b31 100644
--- a/gen/edf_generators.py
+++ b/gen/edf_generators.py
@@ -2,12 +2,15 @@ import generator as gen
 import random
 import ecrts14.partition as partition
 import schedcat.sched.split_heuristic as split
+import ecrts14.ecrts14 as ecrts14
 
 from ecrts14.ecrts14 import create_pgm_task_set
 from ecrts14.ecrts14 import get_overheads
 from config.config import FILES,PARAMS
 from schedcat.overheads.model import Overheads, CacheDelay, ConsumerOverheads, ProducerOverheads
 from ecrts14.tests import get_partitions
+from schedcat.generator.tasksets import NAMED_UTILIZATIONS
+from schedcat.mapping.binpack import DidNotFit
 
 TP_TBASE = """#for $t in $task_set
 {} $t.cost $t.period
@@ -72,20 +75,65 @@ class EdfPgmGenerator(gen.Generator):
 
     def __make_options(self):
         '''Return generic EDF options.'''
-        return [gen.Generator._dist_option('utils', 'uni-medium',
-                    gen.NAMED_UTILIZATIONS,
-                    'Task utilization distributions.'),
-                gen.Generator._dist_option('periods', 'harmonic',
-                    gen.NAMED_PERIODS,
-                    'Task period distributions.')]
+        return [gen.Generator._dist_option('period', ['uni-long'],
+                    ecrts14.NAMED_PERIODS_US,
+                    'Task period distributions.'),
+                gen.Generator._dist_option('num_graphs', 'uni-medium',
+                    ecrts14.NAMED_NUM_GRAPHS,
+                    'Number of graphs.'),
+                gen.Generator._dist_option('depth_factor', ['uni-medium'],
+                    ecrts14.NAMED_HEIGHT_FACTORS,
+                    'Depth of graphs.'),
+                gen.Generator._dist_option('partitions', ['no_cache', 'parallel', 'cache_aware'],
+                    PARTITION_METHOD,
+                    'Partition methods.'),
+                gen.Generator._dist_option('node_placement', ['uniform'],
+                    ecrts14.NAMED_SHAPES,
+                    'The node placement of a graph.'),
+                gen.Generator._dist_option('fan_out', ['uniform_3'],
+                    ecrts14.NAMED_FAN,
+                    'The number of out edges of a node.'),
+                gen.Generator._dist_option('fan_in_cap', [3],
+                    {},
+                    'The maximum number of in-edges of a node.'),
+                gen.Generator._dist_option('edge_distance', ['uniform_3'],
+                    ecrts14.NAMED_EDGE_HOP,
+                    'The number of hops for an edge.'),
+                gen.Generator._dist_option('wss', ['uni-medium'],
+                    ecrts14.NAMED_EDGE_WSS,
+                    'Working set size.'),
+                gen.Generator._dist_option('task_util', ['uni-light'],
+                    NAMED_UTILIZATIONS,
+                    'Task utilization.'),
+                gen.Generator._dist_option('polluters', False,
+                    {},
+                    'Polluters.'),
+                gen.Generator._dist_option('release_master', False,
+                    {},
+                    'Release master.'),
+                gen.Generator._dist_option('job_splitting', True,
+                    {},
+                    'Job splitting.'),
+                gen.Generator._dist_option('ovh_type', 'max',
+                    {},
+                    'Overhead type.'),
+                gen.Generator._dist_option('heur_aggressiveness', 0.75,
+                    {},
+                    'heur_aggressiveness.'),
+                gen.Generator._dist_option('sys_util', [20.0, 9.0, 8.0, 7.0],
+                    {},
+                    'System utilization.')]
 
     def _create_exp(self, dp, ts, graphs, subts):
         '''Create a single experiment with @exp_params in @out_dir.'''
 
-        ts = self._customize(ts, graphs, subts, dp)
+        ret, ts = self._customize(ts, graphs, subts, dp)
 
-        self._write_pgm_schedule(dict(dp.items() + [('task_set', ts)] + [('graphs', graphs)] + [('sub_task_set', subts)]))
-        self._write_params(dict(dp.items() + [('num_tasks', len(ts))]))
+        if ret:
+            self._write_pgm_schedule(dict(dp.items() + [('task_set', ts)] + [('graphs', graphs)] + [('sub_task_set', subts)]))
+            self._write_params(dict(dp.items() + [('num_tasks', len(ts))]))
+
+        return ret
 
     def _create_tasks(self, dp):
         '''Create a task set.'''
@@ -211,11 +259,14 @@ class CflSplitPgmGenerator(EdfPgmGenerator):
         overheads = get_overheads(dp, dp.system)
         # do the partition here
         ts = partition.clear_partitioning(ts)
-        ts = PARTITION_METHOD[exp_params['partitions']](ts, graphs, subts, cluster_sz, dp.nr_clusters, dp.system, dp.heur_aggressiveness, overheads)
+        try:
+            ts = PARTITION_METHOD[exp_params['partitions']](ts, graphs, subts, cluster_sz, dp.nr_clusters, dp.system, dp.heur_aggressiveness, overheads)
+        except DidNotFit:
+            return False, ts
 
         # compute split factor
         working_ts = ts
         partitions = get_partitions(working_ts, dp.nr_clusters, cluster_sz)
         is_srt_sched = split.compute_splits_nolock(overheads, False, working_ts, partitions, bypass_split = not dp.job_splitting)
 
-        return working_ts
+        return True, working_ts
diff --git a/gen/generator.py b/gen/generator.py
index 7456021..2d0fbef 100644
--- a/gen/generator.py
+++ b/gen/generator.py
@@ -15,6 +15,7 @@ from parse.col_map import ColMapBuilder
 from numpy import arange
 from schedcat.util.storage import storage
 from ecrts14.machines import machines
+from ecrts14.ecrts14 import NAMED_NUM_GRAPHS, NAMED_SHAPES, NAMED_HEIGHT_FACTORS, NAMED_FAN, NAMED_EDGE_HOP, NAMED_EDGE_WSS
 
 NAMED_PERIODS = {
     'harmonic' : rv.uniform_choice([25, 50, 100, 200]),
@@ -81,9 +82,7 @@ class Generator(object):
         release_master = list(set([False, bool(rm_config)]))
 
 
-        return [GenOption('partitions', str, ['no_cache', 'parallel', 'cache_aware', 'cache_aware_edges', 'cache_aware_bfs', 'cache_aware_dfs'],
-                'Partition methods.'),
-                GenOption('tasks', int, [0],
+        return [GenOption('tasks', int, [0],
                 'Number of tasks'),
                 GenOption('cpus', int, [cpus],
                 'Number of processors on target system.'),
@@ -154,7 +153,6 @@ class Generator(object):
             split_arg_t = []
 
             for n in g.nodes:
-                assert n.graph.id == g.id
                 cluster_arg_t.append('node_' + str(n.id) + ':' + str(n.task.partition))
                 exec_arg_t.append('node_' + str(n.id) + ':' + str(n.task.cost))
                 split_arg_t.append('node_' + str(n.id) + ':' + str(n.task.split))
@@ -192,7 +190,7 @@ class Generator(object):
         pgm_args = []
         for i in range(len(pgm_params['graphs'])):
             pgm_args_t = '';
-            pgm_args_t += '--wait --cluster ' + cluster_arg[i] + ' --clusterSize ' + clustersz_arg[i] + ' --name graph_' + str(pgm_params['graphs'][i].id)
+            pgm_args_t += '--wait --cluster ' + cluster_arg[i] + ' --clusterSize ' + clustersz_arg[i] + ' --name graph_' + str(i)
             pgm_args_t += ' --graph ' + graph_desc_arg[i] + ' --rates ' + rates_arg[i] + ' --execution ' + exec_arg[i]
             pgm_args_t += ' --split ' + split_arg[i]
             if len(wss_arg[i]) != 0:
@@ -334,79 +332,78 @@ class Generator(object):
         out_dir = opts.out_dir
         force = opts.force
         trials = opts.trials
 
-        # hard coded here
+        # Hardcoded design points
         exp = storage()
         exp.host = ['ludwig']
         cpus = 24.0
         exp.processors = [cpus]
-        exp.task_util = ['uni-medium']
-        exp.period = ['uni-long']
-        exp.sched = ['edf']
-        exp.sys_util = [ 20.0 ] #arange(1, cpus+0.1, 0.1)
-
         exp.wcycle = [ 0 ]
         exp.walk = ['seq']
         exp.huge_pages = [False]
         exp.uncached = [False]
-        exp.polluters = [False]
-        exp.ovh_type = ['max']
-        exp.release_master = [False]
-        exp.heur_aggressiveness = [0.75]
-        exp.job_splitting = [True]
+        exp.sched = ['edf']
         exp.update(self.params)
 
         # Track changing values so only relevant parameters are included
         # in directory names
         for dp in PgmDesignPointGenerator(exp):
             for k, v in dp.iteritems():
                 builder.try_add(k, v)
         col_map = builder.build()
+
+        partition_method = exp['partitions']
+        del exp['partitions']
 
-        for trial in xrange(trials):
-            dp.num_graphs = graph.uniform(opts.num_graphs, opts.num_graphs)
-            dp.depth_factor = [1.0/3.0, 2.0/3.0]
-            dp.node_placement = graph.uniform(opts.node_placement, opts.node_placement)
-            dp.fan_out = graph.geometric(1, opts.fan_out)
-            dp.fan_in_cap = opts.fan_in_cap
-            dp.edge_distance = graph.geometric(1, opts.edge_distance)
-            dp.nr_source = graph.uniform(opts.nr_source, opts.nr_source)
-            dp.nr_sink = graph.uniform(opts.nr_sink, opts.nr_sink)
-            dp.wss = tasks.multimodal([(tasks.uniform_int(1,2), 6), (tasks.uniform_int(2, 8), 3)])
-
-            # Generate a task set
-            ts, graphs, subts = self._create_tasks(dp)
-            dp.tasks = len(ts)
-
-            for dp in PgmDesignPointGenerator(exp):
-                # Create directory name from relevant parameters
-                dir_leaf = "sched=%s_%s" % (self.scheduler, col_map.encode(dp))
-                dir_leaf = dir_leaf.strip('_') # If there are none
-                dir_leaf += ("_trial=%s" % trial) if trials > 1 else ""
-
-                dir_path = "%s/%s" % (out_dir, dir_leaf.strip('_'))
-
-                if os.path.exists(dir_path):
-                    if force:
-                        sh.rmtree(dir_path)
-                    else:
-                        print("Skipping existing experiment: '%s'" % dir_path)
-                        continue
-
-                os.mkdir(dir_path)
-
-                if trials > 1:
-                    dp[PARAMS['trial']] = trial
-                self.out_dir = dir_path
-
-                dp.system = topology.Topology(machines[dp.host])
-
-                # Write a sched.py and param.py for each partition method
-                self._create_exp(dp, ts, graphs, subts)
-
-                del(self.out_dir)
-                if PARAMS['trial'] in dp:
-                    del dp[PARAMS['trial']]
+        for dp in PgmDesignPointGenerator(exp):
+            for trial in xrange(trials):
+                dp.num_graphs = NAMED_NUM_GRAPHS[dp.num_graphs]
+                dp.depth_factor = NAMED_HEIGHT_FACTORS[dp.depth_factor]
+                dp.node_placement = NAMED_SHAPES[dp.node_placement]
+                dp.fan_out = NAMED_FAN[dp.fan_out]
+                dp.edge_distance = NAMED_EDGE_HOP[dp.edge_distance]
+                dp.nr_source = graph.uniform(opts.nr_source, opts.nr_source)
+                dp.nr_sink = graph.uniform(opts.nr_sink, opts.nr_sink)
+                dp.wss = NAMED_EDGE_WSS[dp.wss]
+
+                # Generate a task set
+                ts, graphs, subts = self._create_tasks(dp)
+                dp.tasks = len(ts)
+
+                #for dp in PgmDesignPointGenerator(exp):
+                for pm in partition_method:
+                    dp.partitions = pm
+                    # Create directory name from relevant parameters
+                    dir_leaf = "sched=%s_%s" % (self.scheduler, col_map.encode(dp))
+                    dir_leaf = dir_leaf.strip('_') # If there are none
+                    dir_leaf += ("_trial=%s" % trial) if trials > 1 else ""
+
+                    dir_path = "%s/%s" % (out_dir, dir_leaf.strip('_'))
+
+                    if os.path.exists(dir_path):
+                        if force:
+                            sh.rmtree(dir_path)
+                        else:
+                            print("Skipping existing experiment: '%s'" % dir_path)
+                            continue
+
+                    os.mkdir(dir_path)
+
+                    if trials > 1:
+                        dp[PARAMS['trial']] = trial
+                    self.out_dir = dir_path
+
+                    dp.system = topology.Topology(machines[dp.host])
+
+                    # Write a sched.py and param.py for each partition method
+                    ret = self._create_exp(dp, ts, graphs, subts)
+                    if not ret:
+                        print("Bin-packing fails for " + dir_leaf)
+                        os.rmdir(dir_path)
+
+                    del(self.out_dir)
+                    if PARAMS['trial'] in dp:
+                        del dp[PARAMS['trial']]
 
     def print_help(self):
         display_options = [o for o in self.options if not o.hidden]
diff --git a/gen_pgm_exps.py b/gen_pgm_exps.py
index 62723ec..c6f6198 100755
--- a/gen_pgm_exps.py
+++ b/gen_pgm_exps.py
@@ -30,19 +30,9 @@ def parse_args():
     parser.add_option('-d', '--describe-generators', metavar='generator[,..]',
                       dest='described', default=None,
                       help='describe parameters for generator(s)')
-    parser.add_option('-m', '--num-graphs', default=24, type='int', dest='num_graphs',
-                      help='number of graphs for a taskset')
-    parser.add_option('-p', '--node-placement', default=1, type='int', dest='node_placement',
-                      help='node placement of the graph')
-    parser.add_option('-t', '--fan-out', default=3, type='int', dest='fan_out',
-                      help='fan out of a node')
-    parser.add_option('-i', '--fan-in-cap', default=3, type='int', dest='fan_in_cap',
-                      help='fan in cap')
-    parser.add_option('-g', '--edge-distance', default=3, type='int', dest='edge_distance',
-                      help='edge distance')
     parser.add_option('-u', '--nr-source', default=1, type='int', dest='nr_source',
                       help='number of source nodes')
-    parser.add_option('-v', '--nr_sink', default=1, type='int', dest='nr_sink',
+    parser.add_option('-v', '--nr-sink', default=1, type='int', dest='nr_sink',
                       help='number of sink nodes')
 
 