#!/usr/bin/env python
import sys
import re
import os
import random
import copy
import xml.parsers.expat
from optparse import OptionParser, OptionValueError
from federation.remote_service import service_caller
from federation.service_error import service_error
from deter import topdl
class config_error(RuntimeError): pass
class constraint:
"""
This is mainly a struct to hold constraint fields and convert to XML output
"""
def __init__(self, name=None, required=False, accepts=None, provides=None,
topology=None, match=None):
self.name = name
self.required = required
self.accepts = accepts or []
self.provides = provides or []
        self.topology = topology
self.match = match
def __str__(self):
return "%s:%s:%s:%s" % (self.name, self.required,
",".join(self.provides), ",".join(self.accepts))
def to_xml(self):
rv = ""
rv += "%s" % self.name
rv += "%s" % self.required
for p in self.provides:
rv += "%s" % p
for a in self.accepts:
rv += "%s" % a
rv+= ""
return rv
def constraints_from_xml(string=None, file=None, filename=None,
top="constraints"):
"""
Pull constraints from an xml file. Only constraints in the top element are
recognized. A constraint consists of a constraint element with one name,
    one required, and multiple accepts and provides elements. Each contains a
string. A list of constraints is returned.
"""
class parser:
"""
A little class to encapsulate some state used to parse the constraints.
The methods are all bound to the analogous handlers in an XML parser.
        It collects the constraints in self.constraints.
"""
def __init__(self, top):
self.top = top
self.constraints = [ ]
self.chars = None
self.current = None
self.in_top = False
def start_element(self, name, attrs):
# Clear any collected strings (from inside elements)
self.chars = None
self.key = str(name)
# See if we've entered the containing context
if name == self.top:
self.in_top = True
# Entering a constraint, create the object which also acts as a
# flag to indicate we're collecting constraint data.
if self.in_top:
if name == 'constraint':
self.current = constraint()
def end_element(self, name):
if self.current:
# In a constraint and leaving an element. Clean up any string
# we've collected and process elements we know.
if self.chars is not None:
self.chars = self.chars.strip()
if name == 'required':
if self.chars is None:
self.current.required = False
else:
self.current.required = (self.chars.lower() == 'true')
elif name == 'name':
self.current.name = self.chars
elif name == 'accepts':
self.current.accepts.append(self.chars)
elif name == 'provides':
self.current.provides.append(self.chars)
elif name == 'constraint':
# Leaving this constraint. Record it and clear the flag
self.constraints.append(self.current)
self.current = None
else:
print >>sys.stderr, \
"Unexpected element in constraint: %s" % name
elif name == self.top:
# We've left the containing context
self.in_top = False
def char_data(self, data):
# Collect strings if we're in the overall context
if self.in_top:
if self.chars is None: self.chars = data
else: self.chars += data
# Beginning of constraints_from_xml. Connect up the parser and call it
# properly for the kind of input supplied.
p = parser(top=top)
xp = xml.parsers.expat.ParserCreate()
xp.StartElementHandler = p.start_element
xp.EndElementHandler = p.end_element
xp.CharacterDataHandler = p.char_data
    if len([x for x in (string, filename, file) if x is not None]) != 1:
        raise RuntimeError("Exactly one of file, filename and string " + \
                "must be set")
elif filename:
f = open(filename, "r")
xp.ParseFile(f)
f.close()
elif file:
xp.ParseFile(file)
elif string:
        # pyexpat's Parse() takes isfinal as a positional argument.
        xp.Parse(string, True)
else:
return []
return p.constraints
class ComposeOptionParser(OptionParser):
"""
This class encapsulates the options to this script in one place. It also
holds the callback for the multifile choice.
"""
def __init__(self):
OptionParser.__init__(self)
self.add_option('--url', dest='url', default="http://localhost:23235",
help='url of ns2 to topdl service')
self.add_option('--certfile', dest='cert', default=None,
help='Certificate to use as identity')
self.add_option('--seed', dest='seed', type='int', default=None,
help='Random number seed')
self.add_option('--multifile', dest='files', default=[], type='string',
action='callback', callback=self.multi_callback,
help="Include file multiple times")
self.add_option('--output', dest='outfile', default=None,
help='Output file name')
self.add_option("--format", dest="format", type="choice",
choices=("xml", "topdl", "tcl", "ns"),
help="Output file format")
self.add_option('--add_testbeds', dest='add_testbeds', default=False,
action='store_true',
help='add testbed attributes to each component')
self.add_option('--output_testbeds', dest='output_testbeds',
default=False, action='store_true',
help='Output tb-set-node-testbed commands to ns2')
self.add_option('--lax', dest='lax', default=False,
action='store_true',
help='allow solutions where required constraints fail')
self.add_option('--same_node', dest='same_node', action='store_true',
default=False,
help='Allow loops to the same node to be created.')
self.add_option('--same_topology', dest='same_topo',
action='store_true', default=False,
help='Allow links within the same topology to be created.')
self.add_option('--same_pair', dest='multi_pair',
action='store_true', default=False,
help='Allow multiple links between the same nodes ' + \
'to be created.')
self.add_option('--config', dest='config', default=None,
help='Configuration file of options')
@staticmethod
def multi_callback(option, opt_str, value, parser):
"""
Parse a --multifile command line option. The parameter is of the form
filename,count. This splits the argument at the rightmost comma and
        inserts the (filename, count) tuple into the "files" option. It also
        handles a couple of error cases.
"""
idx = value.rfind(',')
if idx != -1:
try:
parser.values.files.append((value[0:idx], int(value[idx+1:])))
except ValueError, e:
raise OptionValueError(
"Can't convert %s to int in multifile (%s)" % \
(value[idx+1:], value))
else:
raise OptionValueError(
"Bad format (need a comma) for multifile: %s" % value)
def warn(msg):
"""
Exactly what you think. Print a message to stderr
"""
print >>sys.stderr, msg
def make_new_name(names, prefix="name"):
"""
Generate an identifier not present in names by appending an integer to
prefix. The new name is inserted into names and returned.
"""
i = 0
n = "%s%d" % (prefix,i)
while n in names:
i += 1
n = "%s%d" % (prefix,i)
names.add(n)
return n
def base_name(n):
"""
Extract a base name of the node to use for constructing a non-colliding
name. This makes the composed topologies a little more readable. It's a
single regexp, but the function name is more meaningful.
"""
    return re.sub('\d+$', '', n)
def localize_names(top, names, marks):
"""
Take a topology and rename any substrates or elements that share a name
with an existing computer or substrate. Keep the original name as a
localized_name attribute. In addition, remap any constraints or interfaces
that point to the old name over to the new one. Constraints are found in
the marks dict, indexed by node name. Those constraints name attributes
have already been converted to triples (node name, interface name,
topology) so only the node name needs to be changed.
"""
sub_map = { }
for s in top.substrates:
s.set_attribute('localized_name', s.name)
if s.name in names:
sub_map[s.name] = n = make_new_name(names, base_name(s.name))
s.name = n
else:
names.add(s.name)
for e in [ e for e in top.elements if isinstance(e, topdl.Computer)]:
e.set_attribute('localized_name', e.name)
if e.name in names:
            nn = make_new_name(names, base_name(e.name))
for c in marks.get(e.name, []):
c.name = nn
e.name = nn
else:
names.add(e.name)
# Interface mapping. The list comprehension creates a list of
# substrate names where each element in the list is replaced by the
# entry in sub_map indexed by it if present and left alone otherwise.
for i in e.interface:
i.substrate = [ sub_map.get(ii, ii) for ii in i.substrate ]
def meet_constraints(candidates, provides, accepts,
same_node=False, same_topo=False, multi_pair=False):
"""
Try to meet all the constraints in candidates using the information in the
provides and accepts dicts (which index constraints that provide or accept
the given attribute). A constraint is met if it can be matched with another
constraint that provides an attribute that the first constraint accepts.
Only one match per pair is allowed, and we always prefer matching a
    required constraint to an unrequired one. If all the candidates can be
    matched, return True; otherwise return False.
"""
got_all = True
node_match = { }
for c in candidates:
if not c.match:
rmatch = None # Match to a required constraint
umatch = None # Match to an unrequired constraint
for a in c.accepts:
for can in provides.get(a,[]):
# A constraint cannot satisfy itself nor can it match
# multiple times.
if can != c and not can.match:
# Unless same_node is true disallow nodes satisfying
# their own constraints.
if not same_node and can.name == c.name:
continue
# Unless same_topo is true, exclude nodes in the same
# topology.
if not same_topo and can.topology == c.topology:
continue
# Don't allow multiple matches between the same nodes
if not multi_pair and c.name in node_match and \
can.name in node_match[c.name]:
continue
# Now check that can also accepts c
for ca in can.accepts:
if ca in c.provides:
if can.required: rmatch = can
else: umatch = can
break
# Within providers, prefer matches against required
# composition points.
if rmatch:
break
# Within acceptance categories, prefer matches against required
# composition points
if rmatch:
break
# Move the better match over to the match variable
if rmatch: match = rmatch
elif umatch: match = umatch
else: match = None
# Done checking all possible matches. Record the match or note an
# unmatched candidate.
if match:
match.match = c
c.match = match
# Note the match of the nodes
for a, b in ((c.name, match.name), (match.name, c.name)):
if a in node_match: node_match[a].append(b)
else: node_match[a] = [ b]
else:
got_all = False
return got_all
def randomize_constraint_order(indexes):
"""
Randomly reorder the lists of constraints that provides and accepts hold.
"""
if not isinstance(indexes, tuple):
indexes = (indexes,)
for idx in indexes:
for k in idx.keys():
random.shuffle(idx[k])
def remote_ns2topdl(uri, desc, cert):
"""
    Call a remote service to convert the ns2 description to topdl (and in fact
    all the way to a topdl.Topology object).
"""
req = { 'description' : { 'ns2description': desc }, }
r = service_caller('Ns2Topdl')(uri, req, cert)
if r.has_key('Ns2TopdlResponseBody'):
r = r['Ns2TopdlResponseBody']
        ed = r.get('experimentdescription', None)
        if ed and 'topdldescription' in ed:
            return topdl.Topology(**ed['topdldescription'])
        else:
            return None
else:
return None
def connect_composition_points(top, constraints, names):
"""
top is a topology containing copies of all the topologies represented in
    the constraints, flattened into one name space. This routine inserts the
additional substrates required to interconnect the topology as described by
the constraints. After the links are made, unused connection points are
purged.
"""
done = set()
for c in constraints:
if c not in done and c.match:
# c is an unprocessed matched constraint. Create a substrate
# and attach it to the interfaces named by c and c.match.
sn = make_new_name(names, "sub")
s = topdl.Substrate(name=sn)
            # These are the nodes that need to be connected. Put a new
            # interface on each one and hook them to the new substrate.
for e in [ e for e in top.elements \
if isinstance(e, topdl.Computer) \
and e.name in (c.name, c.match.name)]:
ii = make_new_name(set([x.name for x in e.interface]), "inf")
e.interface.append(
topdl.Interface(substrate=[sn], name=ii, element=e))
# c and c.match have been processed, so add them to the done set,
# and add the substrate
top.substrates.append(s)
done.add(c)
done.add(c.match)
top.incorporate_elements()
def import_ns2_services(contents):
"""
Contents is a list containing the lines of an annotated ns2 file. This
routine extracts the service comment lines and puts them into an array for
output later if the output format is tcl.
Services are given in lines of the form
# SERVICE:name:exporter:importers:attributes
See http://fedd.deterlab.net/wiki/FeddAbout#ExperimentServices
"""
service_re = re.compile("\s*#\s*SERVICE.*")
services = [ ]
for l in contents:
m = service_re.search(l)
if m: services.append(l)
return services
def import_ns2_constraints(contents):
"""
Contents is a list containing the lines of an annotated ns2 file. This
    routine extracts the constraint comment lines and converts them into
constraints in the namespace of the tcl experiment, as well as inserting
them in the accepts and provides indices.
    Constraints are given in comment lines of the form
        # COMPOSITION: name:required:provides:accepts
    where name is the name of the node in the current topology, the second
    field is "required" if this is a required constraint, and the remaining
    two fields are comma-separated lists of attributes that this connection
    point provides and accepts. A list of the parsed constraints is returned.
"""
const_re = re.compile("\s*#\s*COMPOSITION:\s*([^:]+:[^:]+:.*)")
constraints = [ ]
for l in contents:
m = const_re.search(l)
if m:
exp = re.sub('\s', '', m.group(1))
nn, r, p, a = exp.split(":")
if nn.find('(') != -1:
# Convert array references into topdl names
nn = re.sub('\\((\\d)\\)', '-\\1', nn)
p = p.split(",")
a = a.split(",")
constraints.append(constraint(name=nn, required=(r == 'required'),
provides=p, accepts=a))
return constraints
def import_ns2_component(fn):
"""
    Pull a composition component in from an ns2 description. The constraints
    are parsed from the comments using import_ns2_constraints and the topology
    is created using a call to a fedd exporting the Ns2Topdl function. A
    topdl.Topology object representing the component's topology, the list of
    constraints extracted from the comments, and the list of service comment
    lines are returned. If either the file read or the conversion fails, an
    appropriate exception is raised.
"""
f = open(fn, "r")
contents = [ l for l in f ]
marks = import_ns2_constraints(contents)
services = import_ns2_services(contents)
top = remote_ns2topdl(opts.url, "".join(contents), cert)
if not top:
raise RuntimeError("Cannot create topology from: %s" % fn)
return (top, marks, services)
def import_xml_component(fn):
"""
Pull a component in from a topdl description.
"""
return (topdl.topology_from_xml(filename=fn, top='experiment'),
constraints_from_xml(filename=fn, top='constraints'), [])
def index_constraints(constraints, provides, accepts, names):
"""
    Add constraints to the provides and accepts indices based on the
    attributes of the constraints. Also index them by name in the names dict.
"""
for c in constraints:
        for attr, idx in ((c.provides, provides), (c.accepts, accepts)):
            for a in attr:
                if a not in idx: idx[a] = [c]
                else: idx[a].append(c)
if c.name in names: names[c.name].append(c)
else: names[c.name]= [ c ]
def get_suffix(fn):
"""
    We get filename suffixes in a couple of places, so it's worth using the
    same code. This gets the shortest '.'-separated suffix from a filename,
    or None.
"""
idx = fn.rfind('.')
if idx != -1: return fn[idx+1:]
else: return None
def output_composition(top, constraints, outfile=None, format=None,
output_testbeds=False, services=None):
"""
Output the composition to the file named by outfile (if any) in the format
    given by format (if any). If both are None, output topdl to stdout.
"""
def xml_out(f, top, constraints, output_testbeds, services):
"""
Output into topdl. Just call the topdl output, as the constraint
attributes should be in the topology.
"""
        # The enclosing element name is an assumption; any single root that
        # contains the 'constraints' and 'experiment' elements satisfies the
        # importers (constraints_from_xml and topdl.topology_from_xml).
        print >>f, "<composition>"
        if constraints:
            print >>f, "<constraints>"
            for c in constraints:
                print >>f, c.to_xml()
            print >>f, "</constraints>"
        print >>f, topdl.topology_to_xml(top, top='experiment')
        print >>f, "</composition>"
def ns2_out(f, top, constraints, output_testbeds, services):
"""
Reformat the constraint data structures into ns2 constraint comments
and output the ns2 using the topdl routine.
"""
# Inner routines
# Deal with the possibility that the single string name is still in the
# constraint.
def name_format(n):
if isinstance(n, tuple): return n[0]
else: return n
        # Format the required field back into a string (ns2_out inner routine).
def required_format(x):
if x: return "required"
else: return "optional"
def testbed_filter(e):
if isinstance(e, topdl.Computer) and e.get_attribute('testbed'):
return 'tb-set-node-testbed ${%s} "%s"' % \
(topdl.to_tcl_name(e.name), e.get_attribute('testbed'))
else:
return ""
if output_testbeds: filters = [ testbed_filter ]
else: filters = [ ]
# ns2_out main line
for c in constraints:
print >>f, "# COMPOSITION: %s:%s:%s:%s" % (
topdl.to_tcl_name(name_format(c.name)),
required_format(c.required), ",".join(c.provides),
",".join(c.accepts))
        for s in services or []:
print >>f, s
print >>f, topdl.topology_to_ns2(top, filters=filters)
# Info to map from format to output routine.
exporters = {
'xml':xml_out, 'topdl':xml_out,
'tcl': ns2_out, 'ns': ns2_out,
}
if format:
# Explicit format set, use it
if format in exporters:
exporter = exporters[format]
else:
raise RuntimeError("Unknown format %s" % format)
elif outfile:
# Determine the format from the suffix (if any)
s = get_suffix(outfile)
if s and s in exporters:
exporter = exporters[s]
else:
raise RuntimeError("Unknown format (suffix) %s" % outfile)
else:
# Both outfile and format are empty
exporter = xml_out
# The actual output. Open the file, if any, and call the exporter
if outfile: f = open(outfile, "w")
else: f = sys.stdout
exporter(f, top, constraints, output_testbeds, services)
if outfile: f.close()
def import_components(files):
"""
Pull in the components. The input is a sequence of tuples where each tuple
includes the name of the file to pull the component from and the number of
copies to make of it. The routine to read a file is picked by its suffix.
    On completion, a tuple is returned containing the number of components
    successfully read, a list of the topologies, a set of names in use across
    all topologies (for generating non-colliding names later), the constraints
    extracted, indexes mapping provided and accepted attributes to the lists
    of constraints that provide or accept them, and the collected service
    lines.
"""
importers = {
'tcl': import_ns2_component,
'ns': import_ns2_component,
'xml':import_xml_component,
'topdl':import_xml_component,
}
names = set()
constraints = [ ]
provides = { }
accepts = { }
components = 0
topos = [ ]
services = [ ]
for fn, cnt in files:
try:
s = get_suffix(fn)
if s and s in importers:
top, cons, svcs = importers[s](fn)
else:
warn("Unknown suffix on file %s. Ignored" % fn)
continue
except service_error, e:
warn("Remote error on %s: %s" % (fn, e))
continue
except EnvironmentError, e:
warn("Error on %s: %s" % (fn, e))
continue
        # Parsed the component and constraints; now work through the rest of
# the pre-processing. We do this once per copy of the component
# requested, cloning topologies and constraints each time.
for i in range(0, cnt):
components += 1
t = top.clone()
c = copy.deepcopy(cons)
marks = { }
            # Bind the copied constraints in c to this topology
for cc in c:
cc.topology = t
index_constraints(c, provides, accepts, marks)
localize_names(t, names, marks)
constraints.extend(c)
services.extend(svcs)
topos.append(t)
return (components, topos, names, constraints, provides, accepts, services)
def parse_config(fn, opts):
"""
Pull configuration options from a file given in fn. These mirror the
command line options.
"""
# These functions make up a jump table for parsing lines of config.
# They're dispatched from directives below.
def file(args, opts):
i = args.find(',')
if i == -1: opts.files.append((args, 1))
else:
try:
opts.files.append((args[0:i], int(args[i+1:])))
except ValueError:
raise config_error("Cannot convert %s to an integer" % \
args[i+1:])
def url(args, opts):
opts.url=args
def output(args, opts):
opts.outfile=args
def format(args, opts):
valid = ("xml", "topdl", "tcl", "ns")
if args in valid:
opts.format = args
else:
raise config_error("Bad format %s. Must be one of %s" % \
(args, valid))
def tb_out(args, opts):
opts.output_testbeds = True
def tb_add(args, opts):
opts.add_testbeds = True
def seed(args, opts):
try:
opts.seed = int(args)
except ValueError:
raise config_error("Cannot convert %s to an integer" % args)
def lax(args, opts):
opts.lax = True
def same_node(args, opts):
opts.same_node = True
def same_topo(args, opts):
opts.same_topo = True
def same_pair(args, opts):
opts.multi_pair = True
directives = {
'file': file,
'multifile': file,
'output': output,
'url': url,
'format': format,
'output_testbeds': tb_out,
'add_testbeds': tb_add,
'seed': seed,
'lax': lax,
'same_node': same_node,
'same_topo': same_topo,
'same_pair': same_pair,
}
comment_re = re.compile('^\s*#')
blank_re = re.compile('^\s*$')
# Parse_config code begins
f = open(fn, "r")
try:
for i, l in enumerate(f):
if blank_re.match(l) or comment_re.match(l):
continue
if l.find('=') != -1: dir, args = l.split('=', 1)
else: dir, args = (l, "")
dir, args = (dir.strip(), args.strip())
try:
if dir in directives: directives[dir](args, opts)
else: file(dir, opts)
except config_error, e:
raise config_error("%s in line %d" % (e, i+1))
finally:
if f: f.close()
# Main line begins
parser = ComposeOptionParser()
opts, args = parser.parse_args()
if opts.config:
try:
parse_config(opts.config, opts)
except EnvironmentError, e:
sys.exit("Can't open configuration: %s" %e)
except config_error, e:
sys.exit(e)
if opts.cert:
cert = opts.cert
elif os.access(os.path.expanduser("~/.ssl/emulab.pem"), os.R_OK):
cert = os.path.expanduser("~/.ssl/emulab.pem")
else:
cert = None
random.seed(opts.seed)
files = opts.files
files.extend([ (a, 1) for a in args])
# Pull 'em in.
components, topos, names, constraints, provides, accepts, services = \
import_components(files)
# If more than one component was given, actually do the composition, otherwise
# this is probably a format conversion.
if components > 1:
# Mix up the constraint indexes
randomize_constraint_order((provides, accepts))
# Now the various components live in the same namespace and are marked with
# their composition requirements.
if not meet_constraints([c for c in constraints if c.required],
provides, accepts, opts.same_node, opts.same_topo, opts.multi_pair):
if opts.lax:
warn("warning: could not meet all required constraints")
else:
sys.exit("Failed: could not meet all required constraints")
meet_constraints([ c for c in constraints if not c.match ],
provides, accepts, opts.same_node, opts.same_topo, opts.multi_pair)
# Add testbed attributes if requested
if opts.add_testbeds:
for i, t in enumerate(topos):
for e in [ e for e in t.elements if isinstance(e, topdl.Computer)]:
e.set_attribute('testbed', 'testbed%03d' % i)
# Make a topology containing all elements and substrates from components
# that had matches.
comp = topdl.Topology()
for t in set([ c.topology for c in constraints if c.match]):
comp.elements.extend([e.clone() for e in t.elements])
comp.substrates.extend([s.clone() for s in t.substrates])
# Add substrates and connections corresponding to composition points
connect_composition_points(comp, constraints, names)
elif components == 1:
comp = topos[0]
if opts.add_testbeds:
for e in [ e for e in comp.elements if isinstance(e, topdl.Computer)]:
e.set_attribute('testbed', 'testbed001')
else:
sys.exit("Did not read any components.")
# Put out the composition with only the unmatched constraints
output_composition(comp, [c for c in constraints if not c.match],
opts.outfile, opts.format, opts.output_testbeds, services)
sys.exit(0)