Cloning into '/root/all-modules-git/DistributedRateLimiting'...
fatal: '/git/DistributedRateLimiting.git' does not appear to be a git repository
fatal: Could not read from remote repository.
Please make sure you have the correct access rights
and the repository exists.
Cloning into '/root/all-modules-git/PingOfDeath'...
fatal: '/git/PingOfDeath.git' does not appear to be a git repository
fatal: Could not read from remote repository.
Please make sure you have the correct access rights
and the repository exists.
Cloning into '/root/all-modules-git/PLCRT'...
fatal: '/git/PLCRT.git' does not appear to be a git repository
fatal: Could not read from remote repository.
Please make sure you have the correct access rights
and the repository exists.
Auto-merging plnet.py
CONFLICT (content): Merge conflict in plnet.py
Automatic merge failed; fix conflicts and then commit the result.
FAILED ! -- out+err below (command was git clone root@git.onelab.eu:/git/DistributedRateLimiting.git /root/all-modules-git/DistributedRateLimiting)
FAILED ! -- end of quoted output
FAILED ! -- out+err below (command was git clone root@git.onelab.eu:/git/PingOfDeath.git /root/all-modules-git/PingOfDeath)
FAILED ! -- end of quoted output
FAILED ! -- out+err below (command was git clone root@git.onelab.eu:/git/PLCRT.git /root/all-modules-git/PLCRT)
FAILED ! -- end of quoted output
FAILED ! -- out+err below (command was git merge --ff origin/master)
FAILED ! -- end of quoted output
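Each FAILED block above quotes the out+err of one command run by the reporting script (whose own code is not part of this report). A minimal, hypothetical sketch of such a run-and-quote helper, for readers who want to reproduce the checks by hand:

    import subprocess

    def run_quoted(command):
        # run the command, capture stdout+stderr, and quote them on failure,
        # in the style of the "FAILED ! -- out+err below" blocks above
        proc = subprocess.Popen(command, shell=True,
                                stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                universal_newlines=True)
        out, _ = proc.communicate()
        if proc.returncode != 0:
            print("FAILED ! -- out+err below (command was %s)" % command)
            print(out)
            print("FAILED ! -- end of quoted output")
        return proc.returncode

    # run_quoted("git clone root@git.onelab.eu:/git/PLCRT.git /root/all-modules-git/PLCRT")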
Changes to tag in mainline - status on 2016-09-24 at 22:06 (CEST)
< (left) monitor-3.1-6
> (right) monitor
diff --git a/commands/myops.py b/commands/myops.py
new file mode 100755
index 0000000..75b969e
--- /dev/null
+++ b/commands/myops.py
@@ -0,0 +1,352 @@
+#!/usr/bin/python
+
+import os
+import sys
+import string
+import time
+
+import getopt
+import sys
+import os
+import xml, xmlrpclib
+from getpass import getpass
+from operator import attrgetter, itemgetter
+
+def get_plc_api(target, url, username, password, expires, debug_mode):
+ # Either read session from disk or create it and save it for later
+ metasession = "%s/%s_%s" % (os.environ['HOME'], ".metasession", target)
+ if os.path.exists(metasession):
+ (localurl,session) = open(metasession, 'r').read().strip().split()
+ plc = xmlrpclib.Server(localurl, verbose=False, allow_none=True)
+ else:
+ plc = xmlrpclib.Server(url, verbose=False, allow_none=True)
+ if password == None: password = getpass()
+ auth = {'Username' : username,
+ 'AuthMethod' : 'password',
+ 'AuthString' : password}
+ session = plc.GetSession(auth, expires*(60*60*24))
+ with open(metasession, 'w') as f: f.write("%s %s\n" % (url,session)) # 'with' auto-closes
+
+ auth = {'AuthMethod' : 'session', 'session' : session}
+
+ class PLC:
+ def __init__(self, plc, auth):
+ self.plc = plc
+ self.auth = auth
+
+ def __getattr__(self, name):
+ method = getattr(self.plc, name)
+ if method is None:
+ raise AssertionError("Method does not exist: %s" % method)
+ if not debug_mode or ('Get' in name or 'AuthCheck' in name):
+ return lambda *params : method(self.auth, *params)
+ else:
+ def call(name,*params):
+ print "DEBUG not running: %s(%s)" % (name, params)
+ return lambda *params : call(name,*params)
+
+ plc_api = PLC(plc, auth)
+ try:
+ # make sure the session is working
+ plc_api.AuthCheck()
+ except:
+ # everything worked except the auth check. try again asking for passwd.
+ plc_api = get_plc_api(target, url, username, None, expires, debug_mode)
+
+ return plc_api
+
+def usage(parser):
+ print """
+myops.py <TARGET> <ACTION> [<object>] [args]
+ MYOPS CLI uses sessions to avoid storing passwords.
+ You choose the session expiration via --expires <days>.
+
+TARGET:
+ When your session is saved it is identified by your given 'target'
+ name. This is a unique string you chose to identify the --apiurl.
+ For example, one might use:
+ plc
+ vicci
+ test
+ vini
+
+ACTION:
+ Connect to TARGET and perform ACTION. The current actions are:
+ enabled -- Manage site, node, and slice 'enabled' states.
+ Object may be sitename, hostname, or slicename.
+
+ exempt -- Manage site, node, and slice exemptions from
+ myops policy actions. Object may be sitename,
+ hostname, or slicename.
+
+ removeall -- Remove all exemptions at site & from nodes, slices
+
+ exemptall -- Add exemptions at site & to nodes, slices
+
+ disableall-- Disable everything at a site:
+ disable site,
+ disable slices,
+
+ enableall -- Release everything at a site:
+ re-enable site,
+ re-enable slices,
+
+EXAMPLES:
+ # setup session and save target name 'plc' for future calls
+ myops.py plc --apiurl https://boot.planet-lab.org/PLCAPI/ \\
+ --username soltesz@cs.princeton.edu
+
+ # list current exemptions at plc
+ myops.py plc exempt
+
+ # to list only one site (nothing will show if no exemption is present)
+ myops.py plc exempt princeton
+
+ # add an exemption until a specific date
+ myops.py plc exempt princeton --expires 20120131
+
+ # remove this exemption
+ myops.py plc exempt princeton -r
+
+ # exempt just a slice, not the whole site.
+ myops.py plc exempt princeton_comon --expires 20120131
+
+ # re-enable a site & slices then, exempt site & slices for 7 days
+ myops.py plc enableall princeton
+ myops.py plc exemptall princeton --expires 7
+
+"""
+ parser.print_help()
+
+def unparse_expire_str(value):
+ if value == None:
+ expires = 60*60*24*30 # 30 days default
+ else:
+ expires = time.mktime(time.strptime(value, "%Y%m%d")) - time.time()
+ return int(expires)
+
+def parse_expire_str(value):
+ import optparse
+ if value == None:
+ return None
+ elif len(value) <= 3:
+ # days from now
+ value = time.strftime("%Y%m%d", time.localtime(time.time()+int(value)*60*60*24))
+ elif len(value) != 8 and value[:3] != "201": # 201 == this decade.
+ # flip out
+ raise optparse.OptionValueError
+ return value
+
+class PlcObj(object):
+ def __init__(self, name):
+ if type(name) == type(""):
+ self.name = name
+ elif type(name) == type({}):
+ if 'login_base' in name:
+ self.name = name['login_base']
+ elif 'hostname' in name:
+ self.name = name['hostname']
+ elif 'name' in name:
+ self.name = name['name']
+
+ self.kind = None
+ if '_' in self.name:
+ kind = 'Slice'
+ elif '.' in self.name:
+ kind='Node'
+ else:
+ kind='Site'
+ self.kind = kind
+
+ def list(self,target,action,*vals):
+ if action == "enabled":
+ print ("\t%s %s %s" % (sys.argv[0],target,action)) + (" %-20s --disable" % self.name)
+ elif action == "exempt":
+ print ("\t%s %s %s" % (sys.argv[0],target,action)) + (" %-20s --expires %s" % ((self.name,)+ vals))
+
+ def enable(self,api,state):
+ if self.kind == 'Slice':
+ # change value of existing slice tag, if it exists.
+ tl = api.GetSliceTags({'name' : self.name, 'tagname' : 'enabled', 'value' : '0' if state else '1'})
+ if len(tl) == 0:
+ api.AddSliceTag(self.name, 'enabled', '1' if state else '0')
+ else:
+ for t in tl:
+ api.UpdateSliceTag(t['slice_tag_id'], {'value' : '1' if state else '0'})
+ elif self.kind == 'Node':
+ if state == True:
+ api.UpdateNode(self.name, {'boot_state' : 'boot'})
+ else:
+ api.UpdateNode(self.name, {'boot_state' : 'disabled'})
+ elif self.kind == 'Site':
+ api.UpdateSite(self.name, {'enabled' : state})
+
+ def exempt(self,api,expires):
+ if expires != None:
+ if self.kind == 'Slice':
+ try: api.AddSliceTag(self.name, 'exempt_slice_until', expires)
+ except: api.UpdateSliceTag(self.name, expires)
+ elif self.kind == 'Node':
+ try: api.AddNodeTag(self.name, 'exempt_node_until', expires)
+ except: api.UpdateNodeTag(self.name, expires)
+ elif self.kind == 'Site':
+ try: api.AddSiteTag(api.GetSites(self.name, ['site_id'])[0]['site_id'], 'exempt_site_until', expires)
+ except: api.UpdateSiteTag(api.GetSiteTags({'login_base' : self.name, 'tagname' : 'exempt_site_until'})[0]['site_tag_id'], expires)
+ else:
+ # remove
+ if self.kind == 'Slice':
+ tag_id_l = api.GetSliceTags({'name' : self.name, 'tagname' : 'exempt_slice_until'}, ['slice_tag_id'])
+ if len(tag_id_l) > 0:
+ tag_id = tag_id_l[0]['slice_tag_id']
+ api.DeleteSliceTag(tag_id)
+ elif self.kind == 'Node':
+ tag_id_l = api.GetNodeTags({'hostname' : self.name, 'tagname' : 'exempt_node_until'}, ['node_tag_id'])
+ if len(tag_id_l) > 0:
+ tag_id = tag_id_l[0]['node_tag_id']
+ api.DeleteNodeTag(tag_id)
+ elif self.kind == 'Site':
+ tag_id_l = api.GetSiteTags({'login_base' : self.name, 'tagname' : 'exempt_site_until'}, ['site_tag_id'])
+ if len(tag_id_l) > 0:
+ tag_id = tag_id_l[0]['site_tag_id']
+ api.DeleteSiteTag(tag_id)
+
+
+def main():
+ from optparse import OptionParser
+ copy = False
+ parser = OptionParser()
+
+ parser.add_option("-d", "--debug", dest="debug", action="store_true", default=False, help="")
+ parser.add_option("-v", "--verbose", dest="verbose", default=False, help="")
+ parser.add_option("-u", "--apiurl", dest="url", default="https://www.planet-lab.org/PLCAPI/", help="Set PLC URL for action")
+ parser.add_option("-U", "--username", dest="username", default=None, help="Login as username")
+ parser.add_option("-P", "--password", dest="password", default=None, help="Use provided password; otherwise prompt for password")
+ parser.add_option("-e", "--expires", dest="expires", default=None, help="Set expiration date YYYYMMDD (or <days>); default is None (i.e. removed)")
+ parser.add_option("", "--disable", dest="disable", default=False, action="store_true", help="Disable object.")
+ parser.add_option("-r", "--remove", dest="remove", action="store_true", default=False, help="Remove object from exemption" )
+ parser.add_option("-l", "--list", dest="list", action="store_true", default=False, help="List objects with command used to generate them")
+ parser.add_option("-S", "--site", dest="login_base", default=None, help="Act on this site")
+ parser.add_option("-H", "--host", dest="hostname", default=None, help="Act on this node")
+ parser.add_option("-s", "--slice", dest="slicename", default=None, help="Act on this site")
+
+ (opt, args) = parser.parse_args()
+ opt.expires = parse_expire_str(opt.expires)
+
+ if len(args) == 0:
+ usage(parser)
+ sys.exit(1)
+
+ target = args[0];
+ api = get_plc_api(target, opt.url, opt.username, opt.password, unparse_expire_str(opt.expires), opt.debug)
+
+ action_list = ['enabled', 'exempt', 'removeall', 'exemptall', 'enableall', 'disableall']
+
+ for i,action in enumerate(args[1:]):
+ if action in action_list:
+ if len(args) > i+2 and args[i+2] not in action_list:
+ objname = args[i+2]
+ else:
+ objname = None
+
+ if action == "enabled":
+
+ if not opt.list and not opt.hostname and not opt.slicename and not opt.login_base:
+ opt.list = True
+ if opt.list:
+ print "Listing only *disabled* objects"
+ sites = api.GetSites({'peer_id' : None, 'enabled': False})
+ nodes = api.GetNodes({'peer_id' : None, 'boot_state' : 'disabled'})
+ slices= api.GetSliceTags({'tagname' : 'enabled'})
+
+ for (header,objlist) in [("Sites:",sites), ("Nodes:", nodes), ("Slices:", slices)]:
+ if len(objlist) > 0: print header
+ for t in objlist:
+ o = PlcObj(t)
+ o.list(target, action)
+
+ if action == "exempt":
+ if not opt.list and not opt.remove and opt.expires == None:
+ opt.list = True
+
+ if opt.list:
+ if objname == None:
+ # NOTE: this works around a bug as of 2011/12/23 that
+ # deleted sites do not also delete all associated site tags.
+ site_lb = [ l['login_base'] for l in api.GetSites({'peer_id' : None}, ['login_base']) ]
+ sites = api.GetSiteTags({'tagname' : 'exempt_site_until'})
+ sites = filter(lambda x: x['login_base'] in site_lb, sites)
+
+ nodes = api.GetNodeTags({'tagname' : 'exempt_node_until'})
+ slices = api.GetSliceTags({'tagname' : 'exempt_slice_until'})
+ else:
+ try: sites = api.GetSiteTags({'login_base': objname, 'tagname' : 'exempt_site_until'})
+ except: sites = []
+ try: nodes = api.GetNodeTags({'hostname' : objname, 'tagname' : 'exempt_node_until'})
+ except: nodes = []
+ try: slices = api.GetSliceTags({'name' : objname, 'tagname' : 'exempt_slice_until'})
+ except: slices = []
+
+ for (header,objlist) in [("Sites:",sites), ("Nodes:", nodes), ("Slices:", slices)]:
+ if len(objlist) > 0: print header
+ for t in objlist:
+ o = PlcObj(t)
+ o.list(target, action, t['value'])
+
+ if opt.remove or opt.expires:
+ obj = PlcObj(objname)
+ # if opt.expires == None, the exemption will be removed.
+ obj.exempt(api,opt.expires)
+
+ if action == "disableall":
+ if objname == None: raise Exception("Provide a site name to disable")
+ # disable site, disable slices,
+ try:
+ slices = api.GetSlices(api.GetSites(objname, ['slice_ids'])[0]['slice_ids'])
+ except:
+ slices = []
+ obj = PlcObj(objname)
+ obj.enable(api,False)
+ for sl in slices:
+ obj = PlcObj(sl['name'])
+ obj.enable(api,False)
+
+ if action == "enableall":
+ # enable site, enable slices,
+ if objname == None: raise Exception("Provide a site name to enableall")
+ try:
+ slices = api.GetSlices(api.GetSites(objname, ['slice_ids'])[0]['slice_ids'])
+ except:
+ slices = []
+ obj = PlcObj(objname)
+ obj.enable(api,True)
+ for sl in slices:
+ obj = PlcObj(sl['name'])
+ obj.enable(api,True)
+
+ if action == "removeall":
+ # remove enable site, enable slices,
+ if objname == None: raise Exception("Provide a site name to remove")
+ try:
+ slices = api.GetSlices(api.GetSites(objname, ['slice_ids'])[0]['slice_ids'])
+ except:
+ slices = []
+ obj = PlcObj(objname)
+ obj.exempt(api,None)
+ for sl in slices:
+ obj = PlcObj(sl['name'])
+ obj.exempt(api,None)
+
+ if action == "exemptall":
+ if objname == None: raise Exception("Provide a site name to exempt")
+ try:
+ slices = api.GetSlices(api.GetSites(objname, ['slice_ids'])[0]['slice_ids'])
+ except:
+ slices = []
+ obj = PlcObj(objname)
+ obj.exempt(api,opt.expires)
+ for sl in slices:
+ obj = PlcObj(sl['name'])
+ obj.exempt(api,opt.expires)
+
+if __name__ == '__main__':
+ main()
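The heart of the new myops.py above is the small proxy built inside get_plc_api(): every attribute access returns a callable that injects the PLCAPI auth structure as the first argument. Condensed into a standalone sketch (same idea, hypothetical class name, session string obtained as in the script):

    import xmlrpclib

    class AuthProxy:
        """Prepend an auth struct to every PLCAPI call, so that
        api.GetNodes(filter) is sent as GetNodes(auth, filter)."""
        def __init__(self, server, auth):
            self.server = server   # an xmlrpclib.Server instance
            self.auth = auth       # e.g. {'AuthMethod': 'session', 'session': session}
        def __getattr__(self, name):
            method = getattr(self.server, name)
            return lambda *params: method(self.auth, *params)

    # plc = xmlrpclib.Server("https://www.planet-lab.org/PLCAPI/", allow_none=True)
    # api = AuthProxy(plc, {'AuthMethod': 'session', 'session': session})
    # api.AuthCheck()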
diff --git a/web/MonitorWeb/monitorweb/controllers.py b/web/MonitorWeb/monitorweb/controllers.py
index 0f919ab..b368df9 100644
--- a/web/MonitorWeb/monitorweb/controllers.py
+++ b/web/MonitorWeb/monitorweb/controllers.py
@@ -25,6 +25,13 @@ from monitor.wrapper.plccache import plcdb_hn2lb as site_hn2lb
from monitorweb.templates.links import *
+def session_clear_all():
+ session.flush()
+ try:
+ session.expunge_all()
+ except AttributeError: # SQLAlchemy < 0.5.1
+ session.clear()
+
class ObjectQueryFields(widgets.WidgetsList):
"""The WidgetsList defines the fields of the form."""
pass
@@ -389,7 +396,7 @@ class Root(controllers.RootController, MonitorXmlrpcServer, LocalExtensions):
def nodeslow(self, filter='boot'):
print "NODE------------------"
print "befor-len: ", len( [ i for i in session] )
- session.flush(); session.clear()
+ session_clear_all()
print "after-len: ", len( [ i for i in session] )
fbquery = FindbadNodeRecord.get_all_latest()
query = []
@@ -513,7 +520,7 @@ class Root(controllers.RootController, MonitorXmlrpcServer, LocalExtensions):
def pre_view(self, **data):
- session.flush(); session.clear()
+ session_clear_all()
loginbase=None
loginbase_list=[]
@@ -620,7 +627,7 @@ class Root(controllers.RootController, MonitorXmlrpcServer, LocalExtensions):
@expose(template="monitorweb.templates.pcuview")
@exception_handler(nodeaction_handler,"isinstance(tg_exceptions,RuntimeError)")
def pcuviewold(self, loginbase=None, pcuid=None, hostname=None, since=20, **data):
- session.flush(); session.clear()
+ session_clear_all()
sitequery=[]
pcuquery=[]
nodequery=[]
@@ -737,7 +744,7 @@ class Root(controllers.RootController, MonitorXmlrpcServer, LocalExtensions):
def pcu(self, filter='all'):
print "PCUVIEW------------------"
print "befor-len: ", len( [ i for i in session] )
- session.flush(); session.clear()
+ session_clear_all()
print "after-len: ", len( [ i for i in session] )
fbquery = FindbadPCURecord.get_all_latest()
query = []
@@ -775,7 +782,7 @@ class Root(controllers.RootController, MonitorXmlrpcServer, LocalExtensions):
def site(self, filter='all'):
print "SITE------------------"
print "befor-len: ", len( [ i for i in session] )
- session.flush(); session.clear()
+ session_clear_all()
print "after-len: ", len( [ i for i in session] )
filtercount = {'good' : 0, 'down': 0, 'online':0, 'offline' : 0, 'new' : 0, 'pending' : 0, 'all' : 0}
fbquery = HistorySiteRecord.query.all()
diff --git a/web/MonitorWeb/monitorweb/monitor_xmlrpc.py b/web/MonitorWeb/monitorweb/monitor_xmlrpc.py
index 2f46879..7d04c48 100644
--- a/web/MonitorWeb/monitorweb/monitor_xmlrpc.py
+++ b/web/MonitorWeb/monitorweb/monitor_xmlrpc.py
@@ -98,7 +98,10 @@ class MonitorXmlrpcServer(object):
if method is None or not getattr(method, "exposed", False):
raise AssertionError("method does not exist")
- session.clear()
+ try:
+ session.expunge_all()
+ except AttributeError: # SQLAlchemy < 0.5.1
+ session.clear()
# Call the method, convert it into a 1-element tuple
# as expected by dumps
response = method(*params)
< (left) myplc-5.3-3
> (right) myplc
diff --git a/support-scripts/cleanup-zombies.py b/support-scripts/cleanup-zombies.py
new file mode 100755
index 0000000..c3df0ac
--- /dev/null
+++ b/support-scripts/cleanup-zombies.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+
+# node manager has a few working assumptions, like
+# if a domain d does not exist, there is no /vservers/d
+
+# this utility tries to detect and assess potentially
+# conflictual situations, that could prevent nodemanager
+# from recovering properly
+#
+# the logic is simply to find zombie-containers, i.e.
+# VMs that do have a workdir in /vservers/<zombie>
+# but that are not reported as running by virsh --list
+# which suggests they have been improperly trashed
+###
+#
+# then we trash them but for that some subdirs must be
+# btrfs-subvolume-delete'd and not rm-rf'ed
+#
+
+import subprocess
+import glob
+import os, os.path
+from argparse import ArgumentParser
+
+def running_domains():
+ command = [
+ 'virsh',
+ '-c',
+ 'lxc:///',
+ 'list',
+ '--name',
+ ]
+ names_string = subprocess.check_output(
+ command,
+ universal_newlines = True,
+ stdin = subprocess.DEVNULL,
+ )
+ names = [ name for name in names_string.strip().split("\n") if name ]
+ return names
+
+def existing_vservers():
+ all_dirs = glob.glob("/vservers/*")
+ dirs = ( dir for dir in all_dirs if os.path.isdir(dir) )
+ dirnames = ( path.replace("/vservers/", "") for path in dirs)
+ return dirnames
+
+def display_or_run_commands(commands, run):
+ if commands:
+ if not run:
+ print("========== You should run")
+ for command in commands:
+ print(" ".join(command))
+ else:
+ for command in commands:
+ print("Running {}".format(" ".join(command)))
+ retcod = subprocess.call(command)
+ if retcod != 0:
+ print("Warning: failed with retcod = {}".format(retcod))
+
+def main():
+ parser = ArgumentParser()
+ # the default is to cowardly show commands to run
+ # use --run to actually do it
+ parser.add_argument("-r", "--run", action='store_true', default=False)
+ args = parser.parse_args()
+
+ running_containers = set(running_domains())
+ existing_containers = set(existing_vservers())
+ zombies_containers = existing_containers - running_containers
+
+ # the prefix used to locate subvolumes
+ flavour_prefixes = [
+ 'onelab-',
+ 'lxc-',
+ 'omf-',
+ ]
+
+ # we need to call 'btrfs subvolume delete' on these remainings
+ # instead of just 'rm'
+ if zombies_containers:
+ commands = []
+ zombie_dirs = ["/vservers/"+z for z in zombies_containers]
+ print("-------- Found {} existing, but not running, containers".format(len(zombies_containers)))
+ print("zombie_dirs='{}'".format(" ".join(zombie_dirs)))
+ subvolumes = [ path
+ for z in zombies_containers
+ for prefix in flavour_prefixes
+ for path in glob.glob("/vservers/{z}/{prefix}*".format(z=z, prefix=prefix))]
+ if subvolumes:
+ print("zombie_subvolumes='{}'".format(" ".join(subvolumes)))
+ for subvolume in subvolumes:
+ commands.append([ 'btrfs', 'subvolume', 'delete', subvolume])
+ for zombie_dir in zombie_dirs:
+ commands.append([ 'btrfs', 'subvolume', 'delete', zombie_dir ])
+ display_or_run_commands(commands, args.run)
+ # find the containers dirs that might still exist
+ zombie_dirs = [ path for path in zombie_dirs if os.path.isdir(path) ]
+ commands = [ ['rm', '-rf', path] for path in zombie_dirs ]
+ display_or_run_commands(commands, args.run)
+
+ #### should happen much less frequently
+ weirdos_containers = running_containers - existing_containers
+ if weirdos_containers:
+ print("-------- Found {} running but non existing".format(len(weirdos_containers)))
+ for w in weirdos_containers:
+ print("/vservers/{}".format(w))
+
+main()
< (left) nodeconfig-5.2-5
> (right) nodeconfig
diff --git a/PlanetLabConf/sysctl.conf b/PlanetLabConf/sysctl.conf
index ce6f512..52db593 100644
--- a/PlanetLabConf/sysctl.conf
+++ b/PlanetLabConf/sysctl.conf
@@ -28,3 +28,5 @@ net.ipv4.tcp_rmem = 4096 87380 4194304
net.ipv4.tcp_wmem = 4096 16384 4194304
net.netfilter.nf_conntrack_icmp_timeout = 60
+fs.inotify.max_user_watches = 81920
+fs.inotify.max_user_instances = 1024
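The two inotify limits added above are exposed under /proc/sys once applied; a small check, assuming a Linux node with the updated sysctl.conf deployed:

    def inotify_limits():
        # read the live values that the updated sysctl.conf is meant to set
        values = {}
        for key in ('max_user_watches', 'max_user_instances'):
            with open('/proc/sys/fs/inotify/' + key) as f:
                values[key] = int(f.read().strip())
        return values

    # expected after this change: {'max_user_watches': 81920, 'max_user_instances': 1024}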
< (left) oml-2.6.1-1
> (right) oml
diff --git a/liboml.spec b/liboml.spec
index 76e8a03..3a716e7 100644
--- a/liboml.spec
+++ b/liboml.spec
@@ -73,6 +73,12 @@ popd
rm -fr $RPM_BUILD_ROOT
pushd %{oml_actual_name}
make install DESTDIR=$RPM_BUILD_ROOT
+# for rpm on fedora23 : since we don't do debug packages anymore
+# see build/Makefile
+%if ("%{distro}" == "Fedora" && %{distrorelease} >= 23)
+rm -rf $RPM_BUILD_ROOT/usr/lib/debug
+rm -rf $RPM_BUILD_ROOT/usr/src/debug
+%endif
popd
%clean
< (left) plcapi-5.3-10
> (right) plcapi
diff --git a/PLC/API.py b/PLC/API.py
index 829e759..f09034c 100644
--- a/PLC/API.py
+++ b/PLC/API.py
@@ -13,20 +13,6 @@ import traceback
import string
import xmlrpclib
-import simplejson
-# use this one
-json=simplejson
-#try:
-# # Try to use jsonlib before using simpljson. This is a hack to get around
-# # the fact that the version of simplejson available for f8 is slightly
-# # faster than xmlrpc but not as fast as jsonlib. There is no jsonlib
-# # package available for f8, so this has to be installed manually and
-# # is not expected to always be available. Remove this once we move away
-# # from f8 based MyPLC's
-# import jsonlib
-# json = jsonlib
-#except:
-# json = simplejson
# See "2.2 Characters" in the XML specification:
#
@@ -34,8 +20,13 @@ json=simplejson
# avoiding
# [#x7F-#x84], [#x86-#x9F], [#xFDD0-#xFDDF]
-invalid_xml_ascii = map(chr, range(0x0, 0x8) + [0xB, 0xC] + range(0xE, 0x1F))
-xml_escape_table = string.maketrans("".join(invalid_xml_ascii), "?" * len(invalid_xml_ascii))
+invalid_codepoints = range(0x0, 0x8) + [0xB, 0xC] + range(0xE, 0x1F)
+# broke with f24, somehow we get a unicode as an incoming string to be translated
+str_xml_escape_table = string.maketrans("".join((chr(x) for x in invalid_codepoints)),
+ "?" * len(invalid_codepoints))
+# loosely inspired from
+# http://stackoverflow.com/questions/1324067/how-do-i-get-str-translate-to-work-with-unicode-strings
+unicode_xml_escape_table = { invalid : u"?" for invalid in invalid_codepoints}
def xmlrpclib_escape(s, replace = string.replace):
"""
@@ -50,7 +41,23 @@ def xmlrpclib_escape(s, replace = string.replace):
s = replace(s, ">", "&gt;",)
# Replace invalid 7-bit control characters with '?'
- return s.translate(xml_escape_table)
+ if isinstance(s, str):
+ return s.translate(str_xml_escape_table)
+ else:
+ return s.translate(unicode_xml_escape_table)
+
+def test_xmlrpclib_escape():
+ inputs = [
+ # full ASCII
+ "".join( (chr(x) for x in range(128))),
+ # likewise but as a unicode string up to 256
+ u"".join( (unichr(x) for x in range(256))),
+ ]
+ for input in inputs:
+ print "==================== xmlrpclib_escape INPUT"
+ print type(input), '->', input
+ print "==================== xmlrpclib_escape OUTPUT"
+ print xmlrpclib_escape(input)
def xmlrpclib_dump(self, value, write):
"""
@@ -258,4 +265,6 @@ class PLCAPI:
return json.dumps(result)
-
+# one simple unit test
+if __name__ == '__main__':
+ test_xmlrpclib_escape()
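The two tables above exist because, under Python 2, str.translate() expects a 256-character mapping built with string.maketrans(), while unicode.translate() expects a dict keyed by code point. A minimal standalone illustration of the same pattern (Python 2):

    import string

    invalid = list(range(0x0, 0x8)) + [0xB, 0xC] + list(range(0xE, 0x1F))
    str_table = string.maketrans("".join(chr(x) for x in invalid), "?" * len(invalid))
    unicode_table = {cp: u"?" for cp in invalid}

    print("abc\x01def".translate(str_table))       # -> abc?def
    print(u"abc\x01def".translate(unicode_table))  # -> abc?def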
diff --git a/plcapi.spec b/plcapi.spec
index 47b84cb..0cae1a4 100644
--- a/plcapi.spec
+++ b/plcapi.spec
@@ -38,7 +38,7 @@ Requires: python-lxml
%if ("%{distro}" == "Fedora" && %{distrorelease} <= 20) || ("%{distro}" != "Fedora")
Requires: SOAPpy
%endif
-Requires: python-simplejson
+#Requires: python-simplejson
# for the RebootNodeWithPCU method
Requires: pcucontrol >= 1.0-6
# for OMF integration
@@ -61,7 +61,8 @@ Conflicts: MyPLC <= 4.3
# Standard xmlrpc.so that ships with PHP does not marshal NULL
# for building the wsdl interface we used to require PyXML
# but this has gone with f20 so turning this off for now
-BuildRequires: php-devel python-simplejson
+BuildRequires: php-devel
+#BuildRequires: python-simplejson
Obsoletes: php-xmlrpc
Provides: php-xmlrpc
< (left) sfa-3.1-20
> (right) sfa
diff --git a/Makefile b/Makefile
index 45add83..e6aa5a9 100644
--- a/Makefile
+++ b/Makefile
@@ -224,6 +224,8 @@ BINS = ./config/sfa-config-tty ./config/gen-sfa-cm-config.py \
synclib: synccheck
+$(RSYNC) --relative ./sfa/ --exclude migrations $(SSHURL)/usr/lib\*/python2.\*/site-packages/
+synclib3: synccheck
+ +$(RSYNC) --relative ./sfa/ --exclude migrations $(SSHURL)/usr/lib\*/python3.\*/site-packages/
synclibdeb: synccheck
+$(RSYNC) --relative ./sfa/ --exclude migrations $(SSHURL)/usr/share/pyshared/
syncmigrations:
diff --git a/clientbin/getNodes.py b/clientbin/getNodes.py
index d77bbbc..ec2e4a2 100644
--- a/clientbin/getNodes.py
+++ b/clientbin/getNodes.py
@@ -4,18 +4,23 @@ import sys
import os
from optparse import OptionParser
from pprint import pprint
-from types import StringTypes
+
+from sfa.util.py23 import StringType
def create_parser():
command = sys.argv[0]
argv = sys.argv[1:]
usage = "%(command)s [options]" % locals()
description = """getNodes will open a rspec file and print all key/values, or filter results based on a given key or set of keys."""
- parser = OptionParser(usage=usage,description=description)
- parser.add_option("-i", "--infile", dest="infile", default=None, help = "input rspec file")
- parser.add_option("-t", "--tag", dest="tag", default=None, help = "filter rspec for this tag")
- parser.add_option("-a", "--attribute", dest="attribute", default=None, help = "comma separated list of attributes to display")
- parser.add_option("-r", "--recursive", dest="print_children", default=False, action="store_true", help = "print the tag's child nodes")
+ parser = OptionParser(usage=usage, description=description)
+ parser.add_option("-i", "--infile", dest="infile", default=None,
+ help = "input rspec file")
+ parser.add_option("-t", "--tag", dest="tag", default=None,
+ help = "filter rspec for this tag")
+ parser.add_option("-a", "--attribute", dest="attribute", default=None,
+ help = "comma separated list of attributes to display")
+ parser.add_option("-r", "--recursive", dest="print_children", default=False, action="store_true",
+ help = "print the tag's child nodes")
return parser
@@ -31,7 +36,7 @@ def print_dict(rdict, options, counter=1):
if not isinstance(rdict, dict):
raise "%s not a dict" % rdict
for (key, value) in rdict.iteritems():
- if isinstance(value, StringTypes):
+ if isinstance(value, StringType):
if (attributes and key in attributes) or not attributes:
print tab * counter + "%s: %s" % (key, value)
elif isinstance(value, list):
@@ -82,6 +87,6 @@ def main():
if __name__ == '__main__':
try: main()
- except Exception, e:
+ except Exception as e:
raise
print e
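The converted clients import StringType and xmlrpc_client from sfa.util.py23, a module that is not part of this diff; a plausible shape for such a 2/3 compatibility shim (a guess inferred from its usage, not the actual module):

    try:                                   # Python 2
        import xmlrpclib as xmlrpc_client
        StringType = basestring            # matches both str and unicode
    except ImportError:                    # Python 3
        import xmlrpc.client as xmlrpc_client
        StringType = str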
diff --git a/clientbin/getRecord.py b/clientbin/getRecord.py
index e2be593..c88f193 100755
--- a/clientbin/getRecord.py
+++ b/clientbin/getRecord.py
@@ -78,8 +78,8 @@ def main():
if __name__ == '__main__':
try: main()
- except ExpatError, e:
+ except ExpatError as e:
print "RecordError. Is your record valid XML?"
print e
- except Exception, e:
+ except Exception as e:
print e
diff --git a/clientbin/setRecord.py b/clientbin/setRecord.py
index 405c90d..aafd95c 100755
--- a/clientbin/setRecord.py
+++ b/clientbin/setRecord.py
@@ -47,9 +47,9 @@ def editDict(args, recordDict, options):
if vect in recordDict:
del recordDict[vect]
else:
- raise TypeError, "Argument error: Records are updated with \n" \
+ raise TypeError("Argument error: Records are updated with \n" \
"key=val1,val2,valN or\n" \
- "key+=val1,val2,valN \n%s Unknown key/val" % vect
+ "key+=val1,val2,valN \n%s Unknown key/val" % vect)
def replaceDict(newval, recordDict, options):
@@ -103,5 +103,5 @@ def main():
if __name__ == '__main__':
try: main()
- except Exception, e:
+ except Exception as e:
print e
diff --git a/clientbin/sfadump.py b/clientbin/sfadump.py
index fb654e7..617635b 100755
--- a/clientbin/sfadump.py
+++ b/clientbin/sfadump.py
@@ -4,7 +4,6 @@ from __future__ import with_statement
import sys
import os, os.path
import tempfile
-from types import StringTypes, ListType
from argparse import ArgumentParser
from sfa.util.sfalogging import logger
diff --git a/clientbin/sfiAddLinks.py b/clientbin/sfiAddLinks.py
index 2e667b1..b16cae6 100755
--- a/clientbin/sfiAddLinks.py
+++ b/clientbin/sfiAddLinks.py
@@ -21,15 +21,15 @@ if not command.opts.linkfile:
sys.exit(1)
if command.opts.infile:
- infile=file(command.opts.infile)
+ infile = open(command.opts.infile)
else:
- infile=sys.stdin
+ infile = sys.stdin
if command.opts.outfile:
- outfile=file(command.opts.outfile,"w")
+ outfile = open(command.opts.outfile, "w")
else:
- outfile=sys.stdout
+ outfile = sys.stdout
ad_rspec = RSpec(infile)
-links = file(command.opts.linkfile).read().split('\n')
+links = open(command.opts.linkfile).read().split('\n')
link_tuples = map(lambda x: tuple(x.split()), links)
version_manager = VersionManager()
diff --git a/clientbin/sfiAddSliver.py b/clientbin/sfiAddSliver.py
index 84ffa8b..6e6042c 100755
--- a/clientbin/sfiAddSliver.py
+++ b/clientbin/sfiAddSliver.py
@@ -21,15 +21,15 @@ if not command.opts.nodefile:
sys.exit(1)
if command.opts.infile:
- infile=file(command.opts.infile)
+ infile = open(command.opts.infile)
else:
- infile=sys.stdin
+ infile = sys.stdin
if command.opts.outfile:
- outfile=file(command.opts.outfile,"w")
+ outfile = open(command.opts.outfile,"w")
else:
- outfile=sys.stdout
+ outfile = sys.stdout
ad_rspec = RSpec(infile)
-nodes = file(command.opts.nodefile).read().split()
+nodes = open(command.opts.nodefile).read().split()
version_manager = VersionManager()
try:
type = ad_rspec.version.type
diff --git a/flashpolicy/sfa_flashpolicy.py b/flashpolicy/sfa_flashpolicy.py
index de98be1..f4d3f16 100644
--- a/flashpolicy/sfa_flashpolicy.py
+++ b/flashpolicy/sfa_flashpolicy.py
@@ -50,7 +50,7 @@ class policy_server(object):
self.sock.bind(('', port))
self.sock.listen(5)
def read_policy(self, path):
- with file(path, 'rb') as f:
+ with open(path, 'rb') as f:
policy = f.read(10001)
if len(policy) > 10000:
raise exceptions.RuntimeError('File probably too large to be a policy file',
@@ -63,7 +63,7 @@ class policy_server(object):
try:
while True:
thread.start_new_thread(self.handle, self.sock.accept())
- except socket.error, e:
+ except socket.error as e:
self.log('Error accepting connection: %s' % (e[1],))
def handle(self, conn, addr):
addrstr = '%s:%s' % (addr[0],addr[1])
@@ -79,9 +79,9 @@ class policy_server(object):
self.log('Valid request received from %s' % (addrstr,))
conn.sendall(self.policy)
self.log('Sent policy file to %s' % (addrstr,))
- except socket.error, e:
+ except socket.error as e:
self.log('Error handling connection from %s: %s' % (addrstr, e[1]))
- except Exception, e:
+ except Exception as e:
self.log('Error handling connection from %s: %s' % (addrstr, e[1]))
def log(self, str):
print >>sys.stderr, str
@@ -105,7 +105,7 @@ def main():
if opts.daemon:
daemon()
policy_server(opts.port, opts.path).run()
- except Exception, e:
+ except Exception as e:
print >> sys.stderr, e
sys.exit(1)
except KeyboardInterrupt:
diff --git a/sfa.spec b/sfa.spec
index 39aa494..047de43 100644
--- a/sfa.spec
+++ b/sfa.spec
@@ -33,8 +33,6 @@ Requires: python-psycopg2
# f8=0.4 - f12=0.5 f14=0.6 f16=0.7
Requires: python-sqlalchemy
Requires: python-migrate
-# the eucalyptus aggregate uses this module
-#Requires: python-xmlbuilder
# for uuidgen - used in db password generation
# on f8 this actually comes with e2fsprogs, go figure
Requires: util-linux-ng
diff --git a/sfa/client/candidates.py b/sfa/client/candidates.py
index f3a99ae..830e24d 100644
--- a/sfa/client/candidates.py
+++ b/sfa/client/candidates.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
### utility to match command-line args to names
class Candidates:
def __init__ (self, names):
@@ -43,8 +45,8 @@ def test_candidates ():
names=names.split()
for (input,expected) in tuples:
got=Candidates(names).only_match(input)
- if got==expected: print '.',
- else: print 'X FAIL','names[',names,'] input',input,'expected',expected,'got',got
+ if got==expected: print('.', end=' ')
+ else: print('X FAIL','names[',names,'] input',input,'expected',expected,'got',got)
if __name__ == '__main__':
test_candidates()
diff --git a/sfa/client/client_helper.py b/sfa/client/client_helper.py
index d9239ad..6e91706 100644
--- a/sfa/client/client_helper.py
+++ b/sfa/client/client_helper.py
@@ -41,7 +41,7 @@ def sfa_users_arg (records, slice_record):
extra_fields = list ( set(pl_fields).union(set(nitos_fields)))
# try to fill all these in
for field in extra_fields:
- if record.has_key(field): user[field]=record[field]
+ if field in record: user[field]=record[field]
users.append(user)
return users
diff --git a/sfa/client/common.py b/sfa/client/common.py
index 52ae3ee..16a0694 100644
--- a/sfa/client/common.py
+++ b/sfa/client/common.py
@@ -1,5 +1,7 @@
# a few utilities common to sfi and sfaadmin
+from __future__ import print_function
+
def optparse_listvalue_callback(option, opt, value, parser):
former=getattr(parser.values,option.dest)
if not former: former=[]
@@ -41,37 +43,37 @@ def terminal_render_plural (how_many, name,names=None):
else: return "%d %s"%(how_many,names)
def terminal_render_default (record,options):
- print "%s (%s)" % (record['hrn'], record['type'])
+ print("%s (%s)" % (record['hrn'], record['type']))
def terminal_render_user (record, options):
- print "%s (User)"%record['hrn'],
+ print("%s (User)"%record['hrn'], end=' ')
if options.verbose and record.get('email', None):
- print "email='{}'".format(record['email']),
+ print("email='{}'".format(record['email']), end=' ')
if record.get('reg-pi-authorities', None):
- print " [PI at %s]"%(" and ".join(record['reg-pi-authorities'])),
+ print(" [PI at %s]"%(" and ".join(record['reg-pi-authorities'])), end=' ')
if record.get('reg-slices', None):
- print " [IN slices %s]"%(" and ".join(record['reg-slices'])),
+ print(" [IN slices %s]"%(" and ".join(record['reg-slices'])), end=' ')
user_keys=record.get('reg-keys',[])
if not options.verbose:
- print " [has %s]"%(terminal_render_plural(len(user_keys),"key"))
+ print(" [has %s]"%(terminal_render_plural(len(user_keys),"key")))
else:
- print ""
- for key in user_keys: print 8*' ',key.strip("\n")
+ print("")
+ for key in user_keys: print(8*' ',key.strip("\n"))
def terminal_render_slice (record, options):
- print "%s (Slice)"%record['hrn'],
+ print("%s (Slice)"%record['hrn'], end=' ')
if record.get('reg-researchers', None):
- print " [USERS %s]"%(" and ".join(record['reg-researchers'])),
+ print(" [USERS %s]"%(" and ".join(record['reg-researchers'])), end=' ')
# print record.keys()
- print ""
+ print("")
def terminal_render_authority (record, options):
- print "%s (Authority)"%record['hrn'],
+ print("%s (Authority)"%record['hrn'], end=' ')
if options.verbose and record.get('name'):
- print "name='{}'".format(record['name'])
+ print("name='{}'".format(record['name']))
if record.get('reg-pis', None):
- print " [PIS %s]"%(" and ".join(record['reg-pis'])),
- print ""
+ print(" [PIS %s]"%(" and ".join(record['reg-pis'])), end=' ')
+ print("")
def terminal_render_node (record, options):
- print "%s (Node)"%record['hrn']
+ print("%s (Node)"%record['hrn'])
### used in sfi list
diff --git a/sfa/client/manifolduploader.py b/sfa/client/manifolduploader.py
index c291a42..3b9de16 100755
--- a/sfa/client/manifolduploader.py
+++ b/sfa/client/manifolduploader.py
@@ -30,9 +30,10 @@ import ssl
try: turn_off_server_verify = { 'context' : ssl._create_unverified_context() }
except: turn_off_server_verify = {}
-import xmlrpclib
import getpass
+from sfa.util.py23 import xmlrpc_client
+
class ManifoldUploader:
"""A utility class for uploading delegated credentials to a manifold/MySlice infrastructure"""
@@ -79,12 +80,12 @@ class ManifoldUploader:
# if not self._proxy:
# url=self.url()
# self.logger.info("Connecting manifold url %s"%url)
-# self._proxy = xmlrpclib.ServerProxy(url, allow_none = True)
+# self._proxy = xmlrpc_client.ServerProxy(url, allow_none = True)
# return self._proxy
url=self.url()
self.logger.debug("Connecting manifold url %s"%url)
- proxy = xmlrpclib.ServerProxy(url, allow_none = True,
- **turn_off_server_verify)
+ proxy = xmlrpc_client.ServerProxy(url, allow_none = True,
+ **turn_off_server_verify)
return proxy
@@ -113,7 +114,7 @@ class ManifoldUploader:
try:
self.logger.debug("Using new v2 method forward+annotation@%s %s"%(platform,message))
retcod2=manifold.forward (query, annotation)
- except Exception,e:
+ except Exception as e:
# xxx we need a constant constant for UNKNOWN, how about using 1
MANIFOLD_UNKNOWN=1
retcod2={'code':MANIFOLD_UNKNOWN,'description':"%s"%e}
@@ -129,7 +130,7 @@ class ManifoldUploader:
self.logger.debug("****** full retcod2")
for (k,v) in retcod2.items(): self.logger.debug("**** %s: %s"%(k,v))
return False
- except Exception, e:
+ except Exception as e:
if message: self.logger.error("Could not upload %s %s"%(message,e))
else: self.logger.error("Could not upload credential %s"%e)
if self.logger.debugEnabled():
@@ -163,7 +164,7 @@ def main ():
logger=sfi_logger)
for filename in args.credential_files:
- with file(filename) as f:
+ with open(filename) as f:
result=uploader.upload (f.read(),filename)
sfi_logger.info('... result=%s'%result)
diff --git a/sfa/client/multiclient.py b/sfa/client/multiclient.py
index 75573ed..6bdf2b2 100644
--- a/sfa/client/multiclient.py
+++ b/sfa/client/multiclient.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import threading
import traceback
import time
@@ -15,7 +17,7 @@ def ThreadedMethod(callable, results, errors):
def run(self):
try:
results.put(callable(*args, **kwds))
- except Exception, e:
+ except Exception as e:
logger.log_exc('MultiClient: Error in thread: ')
errors.put(traceback.format_exc())
@@ -95,14 +97,14 @@ if __name__ == '__main__':
def f(name, n, sleep=1):
nums = []
for i in range(n, n+5):
- print "%s: %s" % (name, i)
+ print("%s: %s" % (name, i))
nums.append(i)
time.sleep(sleep)
return nums
def e(name, n, sleep=1):
nums = []
for i in range(n, n+3) + ['n', 'b']:
- print "%s: 1 + %s:" % (name, i)
+ print("%s: 1 + %s:" % (name, i))
nums.append(i + 1)
time.sleep(sleep)
return nums
diff --git a/sfa/client/return_value.py b/sfa/client/return_value.py
index 034dcd8..c2c4f47 100644
--- a/sfa/client/return_value.py
+++ b/sfa/client/return_value.py
@@ -16,7 +16,7 @@ class ReturnValue(dict):
@staticmethod
def get_key_value(key, return_value):
- if isinstance(return_value, dict) and return_value.has_key(key):
+ if isinstance(return_value, dict) and key in return_value:
return return_value.get(key)
else:
return return_value
diff --git a/sfa/client/sfaadmin.py b/sfa/client/sfaadmin.py
index a21cb5f..49757b3 100755
--- a/sfa/client/sfaadmin.py
+++ b/sfa/client/sfaadmin.py
@@ -1,4 +1,6 @@
#!/usr/bin/python
+from __future__ import print_function
+
import os
import sys
import copy
@@ -130,15 +132,15 @@ class RegistryCommands(Commands):
hrn = Xrn(xrn).get_hrn()
db_query = db_query.filter_by(hrn=hrn)
elif all and xrn:
- print "Use either -a or -x <xrn>, not both !!!"
+ print("Use either -a or -x <xrn>, not both !!!")
sys.exit(1)
elif not all and not xrn:
- print "Use either -a or -x <xrn>, one of them is mandatory !!!"
+ print("Use either -a or -x <xrn>, one of them is mandatory !!!")
sys.exit(1)
records = db_query.all()
if not records:
- print "No Record found"
+ print("No Record found")
sys.exit(1)
OK = []
@@ -171,15 +173,15 @@ class RegistryCommands(Commands):
NOK.append(record.hrn)
if not verbose:
- print "Users NOT having a PubKey: %s\n\
+ print("Users NOT having a PubKey: %s\n\
Users having a non RSA PubKey: %s\n\
Users having a GID/PubKey correpondence OK: %s\n\
-Users having a GID/PubKey correpondence Not OK: %s\n"%(len(NOKEY), len(ERROR), len(OK), len(NOK))
+Users having a GID/PubKey correpondence Not OK: %s\n"%(len(NOKEY), len(ERROR), len(OK), len(NOK)))
else:
- print "Users NOT having a PubKey: %s and are: \n%s\n\n\
+ print("Users NOT having a PubKey: %s and are: \n%s\n\n\
Users having a non RSA PubKey: %s and are: \n%s\n\n\
Users having a GID/PubKey correpondence OK: %s and are: \n%s\n\n\
-Users having a GID/PubKey correpondence NOT OK: %s and are: \n%s\n\n"%(len(NOKEY),NOKEY, len(ERROR), ERROR, len(OK), OK, len(NOK), NOK)
+Users having a GID/PubKey correpondence NOT OK: %s and are: \n%s\n\n"%(len(NOKEY),NOKEY, len(ERROR), ERROR, len(OK), OK, len(NOK), NOK))
@@ -252,7 +254,7 @@ Users having a GID/PubKey correpondence NOT OK: %s and are: \n%s\n\n"%(len(NOKEY
def credential(self, xrn, type=None):
"""Invoke GetCredential"""
cred = self.api.manager.GetCredential(self.api, xrn, type, self.api.hrn)
- print cred
+ print(cred)
def import_registry(self):
@@ -337,7 +339,7 @@ class CertCommands(Commands):
auth_info = hierarchy.get_auth_info(hrn)
gid = auth_info.gid_object
except:
- print "Record: %s not found" % hrn
+ print("Record: %s not found" % hrn)
sys.exit(1)
# save to file
if not outfile:
@@ -349,7 +351,7 @@ class CertCommands(Commands):
"""Print contents of a GID file"""
gid_path = os.path.abspath(gidfile)
if not gid_path or not os.path.isfile(gid_path):
- print "No such gid file: %s" % gidfile
+ print("No such gid file: %s" % gidfile)
sys.exit(1)
gid = GID(filename=gid_path)
gid.dump(dump_parents=True)
@@ -377,9 +379,9 @@ class AggregateCommands(Commands):
def resources(self, rspec_version='GENI'):
"""Display the available resources at an aggregate"""
options = {'geni_rspec_version': rspec_version}
- print options
+ print(options)
resources = self.api.manager.ListResources(self.api, [], options)
- print resources
+ print(resources)
@add_options('-x', '--xrn', dest='xrn', metavar='<xrn>', help='slice hrn/urn (mandatory)')
@@ -391,7 +393,7 @@ class AggregateCommands(Commands):
rspec_string = open(rspec).read()
options={}
manifest = self.api.manager.Allocate(self.api, slice_urn, [], rspec_string, options)
- print manifest
+ print(manifest)
@add_options('-x', '--xrn', dest='xrn', metavar='<xrn>', help='slice hrn/urn (mandatory)')
@@ -401,7 +403,7 @@ class AggregateCommands(Commands):
slice_urn=xrn.get_urn()
options={}
manifest = self.api.manager.provision(self.api, [slice_urn], [], options)
- print manifest
+ print(manifest)
@@ -433,14 +435,14 @@ class SfaAdmin:
return (full_name,SfaAdmin.CATEGORIES[full_name])
def summary_usage (self, category=None):
- print "Usage:", self.script_name + " category command [<options>]"
+ print("Usage:", self.script_name + " category command [<options>]")
if category and category in SfaAdmin.CATEGORIES:
categories=[category]
else:
categories=SfaAdmin.CATEGORIES
for c in categories:
cls=SfaAdmin.CATEGORIES[c]
- print "==================== category=%s"%c
+ print("==================== category=%s"%c)
names=cls.__dict__.keys()
names.sort()
for name in names:
@@ -448,15 +450,15 @@ class SfaAdmin:
if name.startswith('_'): continue
margin=15
format="%%-%ds"%margin
- print "%-15s"%name,
+ print("%-15s"%name, end=' ')
doc=getattr(method,'__doc__',None)
if not doc:
- print "<missing __doc__>"
+ print("<missing __doc__>")
continue
lines=[line.strip() for line in doc.split("\n")]
line1=lines.pop(0)
- print line1
- for extra_line in lines: print margin*" ",extra_line
+ print(line1)
+ for extra_line in lines: print(margin*" ",extra_line)
sys.exit(2)
def main(self):
@@ -509,14 +511,14 @@ class SfaAdmin:
command(*cmd_args, **cmd_kwds)
sys.exit(0)
except TypeError:
- print "Possible wrong number of arguments supplied"
+ print("Possible wrong number of arguments supplied")
#import traceback
#traceback.print_exc()
- print command.__doc__
+ print(command.__doc__)
parser.print_help()
sys.exit(1)
#raise
except Exception:
- print "Command failed, please check log for more info"
+ print("Command failed, please check log for more info")
raise
sys.exit(1)
diff --git a/sfa/client/sfaclientlib.py b/sfa/client/sfaclientlib.py
index bc4a1d1..b7114be 100644
--- a/sfa/client/sfaclientlib.py
+++ b/sfa/client/sfaclientlib.py
@@ -1,14 +1,16 @@
# Thierry Parmentelat -- INRIA
-#
-# a minimal library for writing "lightweight" SFA clients
-#
+"""
+a minimal library for writing "lightweight" SFA clients
+"""
+
+from __future__ import print_function
# xxx todo
# this library should probably check for the expiration date of the various
# certificates and automatically retrieve fresh ones when expired
import sys
-import os,os.path
+import os, os.path
import subprocess
from datetime import datetime
from sfa.util.xrn import Xrn
@@ -100,90 +102,91 @@ from sfa.trust.gid import GID
#
####################
-class SfaClientException (Exception): pass
+class SfaClientException(Exception): pass
class SfaClientBootstrap:
# dir is mandatory but defaults to '.'
- def __init__ (self, user_hrn, registry_url, dir=None,
- verbose=False, timeout=None, logger=None):
- self.hrn=user_hrn
- self.registry_url=registry_url
- if dir is None: dir="."
- self.dir=dir
- self.verbose=verbose
- self.timeout=timeout
+ def __init__(self, user_hrn, registry_url, dir=None,
+ verbose=False, timeout=None, logger=None):
+ self.hrn = user_hrn
+ self.registry_url = registry_url
+ if dir is None:
+ dir="."
+ self.dir = dir
+ self.verbose = verbose
+ self.timeout = timeout
# default for the logger is to use the global sfa logger
if logger is None:
logger = sfa.util.sfalogging.logger
- self.logger=logger
+ self.logger = logger
######################################## *_produce methods
### step1
# unconditionnally create a self-signed certificate
- def self_signed_cert_produce (self, output):
+ def self_signed_cert_produce(self, output):
self.assert_private_key()
private_key_filename = self.private_key_filename()
- keypair=Keypair(filename=private_key_filename)
- self_signed = Certificate (subject = self.hrn)
- self_signed.set_pubkey (keypair)
- self_signed.set_issuer (keypair, self.hrn)
- self_signed.sign ()
- self_signed.save_to_file (output)
- self.logger.debug("SfaClientBootstrap: Created self-signed certificate for %s in %s"%\
- (self.hrn, output))
+ keypair = Keypair(filename=private_key_filename)
+ self_signed = Certificate(subject = self.hrn)
+ self_signed.set_pubkey(keypair)
+ self_signed.set_issuer(keypair, self.hrn)
+ self_signed.sign()
+ self_signed.save_to_file(output)
+ self.logger.debug("SfaClientBootstrap: Created self-signed certificate for {} in {}"
+ .format(self.hrn, output))
return output
### step2
# unconditionnally retrieve my credential (GetSelfCredential)
# we always use the self-signed-cert as the SSL cert
- def my_credential_produce (self, output):
+ def my_credential_produce(self, output):
self.assert_self_signed_cert()
certificate_filename = self.self_signed_cert_filename()
- certificate_string = self.plain_read (certificate_filename)
+ certificate_string = self.plain_read(certificate_filename)
self.assert_private_key()
- registry_proxy = SfaServerProxy (self.registry_url,
- self.private_key_filename(),
- certificate_filename)
+ registry_proxy = SfaServerProxy(self.registry_url,
+ self.private_key_filename(),
+ certificate_filename)
try:
- credential_string=registry_proxy.GetSelfCredential (certificate_string, self.hrn, "user")
+ credential_string = registry_proxy.GetSelfCredential(certificate_string, self.hrn, "user")
except:
# some urns hrns may replace non hierarchy delimiters '.' with an '_' instead of escaping the '.'
hrn = Xrn(self.hrn).get_hrn().replace('\.', '_')
- credential_string=registry_proxy.GetSelfCredential (certificate_string, hrn, "user")
- self.plain_write (output, credential_string)
- self.logger.debug("SfaClientBootstrap: Wrote result of GetSelfCredential in %s"%output)
+ credential_string = registry_proxy.GetSelfCredential(certificate_string, hrn, "user")
+ self.plain_write(output, credential_string)
+ self.logger.debug("SfaClientBootstrap: Wrote result of GetSelfCredential in {}".format(output))
return output
### step3
# unconditionnally retrieve my GID - use the general form
- def my_gid_produce (self,output):
- return self.gid_produce (output, self.hrn, "user")
+ def my_gid_produce(self, output):
+ return self.gid_produce(output, self.hrn, "user")
### retrieve any credential (GetCredential) unconditionnal form
# we always use the GID as the SSL cert
- def credential_produce (self, output, hrn, type):
+ def credential_produce(self, output, hrn, type):
self.assert_my_gid()
certificate_filename = self.my_gid_filename()
self.assert_private_key()
- registry_proxy = SfaServerProxy (self.registry_url, self.private_key_filename(),
- certificate_filename)
+ registry_proxy = SfaServerProxy(self.registry_url, self.private_key_filename(),
+ certificate_filename)
self.assert_my_credential()
my_credential_string = self.my_credential_string()
- credential_string=registry_proxy.GetCredential (my_credential_string, hrn, type)
- self.plain_write (output, credential_string)
- self.logger.debug("SfaClientBootstrap: Wrote result of GetCredential in %s"%output)
+ credential_string = registry_proxy.GetCredential(my_credential_string, hrn, type)
+ self.plain_write(output, credential_string)
+ self.logger.debug("SfaClientBootstrap: Wrote result of GetCredential in {}".format(output))
return output
- def slice_credential_produce (self, output, hrn):
- return self.credential_produce (output, hrn, "slice")
+ def slice_credential_produce(self, output, hrn):
+ return self.credential_produce(output, hrn, "slice")
- def authority_credential_produce (self, output, hrn):
- return self.credential_produce (output, hrn, "authority")
+ def authority_credential_produce(self, output, hrn):
+ return self.credential_produce(output, hrn, "authority")
- ### retrieve any gid (Resolve) - unconditionnal form
+ ### retrieve any gid(Resolve) - unconditionnal form
# use my GID when available as the SSL cert, otherwise the self-signed
- def gid_produce (self, output, hrn, type ):
+ def gid_produce(self, output, hrn, type ):
try:
self.assert_my_gid()
certificate_filename = self.my_gid_filename()
@@ -192,16 +195,16 @@ class SfaClientBootstrap:
certificate_filename = self.self_signed_cert_filename()
self.assert_private_key()
- registry_proxy = SfaServerProxy (self.registry_url, self.private_key_filename(),
- certificate_filename)
- credential_string=self.plain_read (self.my_credential())
- records = registry_proxy.Resolve (hrn, credential_string)
- records=[record for record in records if record['type']==type]
+ registry_proxy = SfaServerProxy(self.registry_url, self.private_key_filename(),
+ certificate_filename)
+ credential_string = self.plain_read(self.my_credential())
+ records = registry_proxy.Resolve(hrn, credential_string)
+ records = [record for record in records if record['type'] == type]
if not records:
- raise RecordNotFound, "hrn %s (%s) unknown to registry %s"%(hrn,type,self.registry_url)
- record=records[0]
- self.plain_write (output, record['gid'])
- self.logger.debug("SfaClientBootstrap: Wrote GID for %s (%s) in %s"% (hrn,type,output))
+ raise RecordNotFound("hrn {} ({}) unknown to registry {}".format(hrn, type, self.registry_url))
+ record = records[0]
+ self.plain_write(output, record['gid'])
+ self.logger.debug("SfaClientBootstrap: Wrote GID for {} ({}) in {}".format(hrn, type, output))
return output
@@ -209,17 +212,17 @@ class SfaClientBootstrap:
### produce a pkcs12 bundled certificate from GID and private key
# xxx for now we put a hard-wired password that's just, well, 'password'
# when leaving this empty on the mac, result can't seem to be loaded in keychain..
- def my_pkcs12_produce (self, filename):
- password=raw_input("Enter password for p12 certificate: ")
- openssl_command=['openssl', 'pkcs12', "-export"]
- openssl_command += [ "-password", "pass:%s"%password ]
+ def my_pkcs12_produce(self, filename):
+ password = raw_input("Enter password for p12 certificate: ")
+ openssl_command = ['openssl', 'pkcs12', "-export"]
+ openssl_command += [ "-password", "pass:{}".format(password) ]
openssl_command += [ "-inkey", self.private_key_filename()]
openssl_command += [ "-in", self.my_gid_filename()]
openssl_command += [ "-out", filename ]
- if subprocess.call(openssl_command) ==0:
- print "Successfully created %s"%filename
+ if subprocess.call(openssl_command) == 0:
+ print("Successfully created {}".format(filename))
else:
- print "Failed to create %s"%filename
+ print("Failed to create {}".format(filename))
# Returns True if credential file is valid. Otherwise return false.
def validate_credential(self, filename):
@@ -234,170 +237,168 @@ class SfaClientBootstrap:
#################### public interface
# return my_gid, run all missing steps in the bootstrap sequence
- def bootstrap_my_gid (self):
+ def bootstrap_my_gid(self):
self.self_signed_cert()
self.my_credential()
return self.my_gid()
# once we've bootstrapped we can use this object to issue any other SFA call
# always use my gid
- def server_proxy (self, url):
+ def server_proxy(self, url):
self.assert_my_gid()
- return SfaServerProxy (url, self.private_key_filename(), self.my_gid_filename(),
- verbose=self.verbose, timeout=self.timeout)
+ return SfaServerProxy(url, self.private_key_filename(), self.my_gid_filename(),
+ verbose=self.verbose, timeout=self.timeout)
# now in some cases the self-signed is enough
- def server_proxy_simple (self, url):
+ def server_proxy_simple(self, url):
self.assert_self_signed_cert()
- return SfaServerProxy (url, self.private_key_filename(), self.self_signed_cert_filename(),
- verbose=self.verbose, timeout=self.timeout)
+ return SfaServerProxy(url, self.private_key_filename(), self.self_signed_cert_filename(),
+ verbose=self.verbose, timeout=self.timeout)
# this method can optionnally be invoked to ensure proper
# installation of the private key that belongs to this user
# installs private_key in working dir with expected name -- preserve mode
# typically user_private_key would be ~/.ssh/id_rsa
# xxx should probably check the 2 files are identical
- def init_private_key_if_missing (self, user_private_key):
- private_key_filename=self.private_key_filename()
- if not os.path.isfile (private_key_filename):
- key=self.plain_read(user_private_key)
+ def init_private_key_if_missing(self, user_private_key):
+ private_key_filename = self.private_key_filename()
+ if not os.path.isfile(private_key_filename):
+ key = self.plain_read(user_private_key)
self.plain_write(private_key_filename, key)
- os.chmod(private_key_filename,os.stat(user_private_key).st_mode)
- self.logger.debug("SfaClientBootstrap: Copied private key from %s into %s"%\
- (user_private_key,private_key_filename))
+ os.chmod(private_key_filename, os.stat(user_private_key).st_mode)
+ self.logger.debug("SfaClientBootstrap: Copied private key from {} into {}"
+ .format(user_private_key, private_key_filename))
#################### private details
# stupid stuff
- def fullpath (self, file): return os.path.join (self.dir,file)
+ def fullpath(self, file):
+ return os.path.join(self.dir, file)
# the expected filenames for the various pieces
- def private_key_filename (self):
- return self.fullpath ("%s.pkey" % Xrn.unescape(self.hrn))
- def self_signed_cert_filename (self):
- return self.fullpath ("%s.sscert"%self.hrn)
- def my_credential_filename (self):
- return self.credential_filename (self.hrn, "user")
+ def private_key_filename(self):
+ return self.fullpath("{}.pkey".format(Xrn.unescape(self.hrn)))
+ def self_signed_cert_filename(self):
+ return self.fullpath("{}.sscert".format(self.hrn))
+ def my_credential_filename(self):
+ return self.credential_filename(self.hrn, "user")
# the tests use sfi -u <pi-user>; meaning that the slice credential filename
# needs to keep track of the user too
- def credential_filename (self, hrn, type):
+ def credential_filename(self, hrn, type):
if type in ['user']:
- basename="%s.%s.cred"%(hrn,type)
+ basename = "{}.{}.cred".format(hrn, type)
else:
- basename="%s-%s.%s.cred"%(self.hrn,hrn,type)
- return self.fullpath (basename)
- def slice_credential_filename (self, hrn):
- return self.credential_filename(hrn,'slice')
- def authority_credential_filename (self, hrn):
- return self.credential_filename(hrn,'authority')
- def my_gid_filename (self):
- return self.gid_filename (self.hrn, "user")
- def gid_filename (self, hrn, type):
- return self.fullpath ("%s.%s.gid"%(hrn,type))
- def my_pkcs12_filename (self):
- return self.fullpath ("%s.p12"%self.hrn)
+ basename = "{}-{}.{}.cred".format(self.hrn, hrn, type)
+ return self.fullpath(basename)
+ def slice_credential_filename(self, hrn):
+ return self.credential_filename(hrn, 'slice')
+ def authority_credential_filename(self, hrn):
+ return self.credential_filename(hrn, 'authority')
+ def my_gid_filename(self):
+ return self.gid_filename(self.hrn, "user")
+ def gid_filename(self, hrn, type):
+ return self.fullpath("{}.{}.gid".format(hrn, type))
+ def my_pkcs12_filename(self):
+ return self.fullpath("{}.p12".format(self.hrn))
# optimizing dependencies
# originally we used classes GID or Credential or Certificate
# like e.g.
# return Credential(filename=self.my_credential()).save_to_string()
# but in order to make it simpler for other implementations/languages...
- def plain_read (self, filename):
- infile=file(filename,"r")
- result=infile.read()
- infile.close()
- return result
-
- def plain_write (self, filename, contents):
- outfile=file(filename,"w")
- result=outfile.write(contents)
- outfile.close()
-
- def assert_filename (self, filename, kind):
- if not os.path.isfile (filename):
- raise IOError,"Missing %s file %s"%(kind,filename)
+ def plain_read(self, filename):
+ with open(filename) as infile:
+ return infile.read()
+
+ def plain_write(self, filename, contents):
+ with open(filename, "w") as outfile:
+ outfile.write(contents)
+
+ def assert_filename(self, filename, kind):
+ if not os.path.isfile(filename):
+ raise IOError("Missing {} file {}".format(kind, filename))
return True
- def assert_private_key (self):
- return self.assert_filename (self.private_key_filename(),"private key")
- def assert_self_signed_cert (self):
- return self.assert_filename (self.self_signed_cert_filename(),"self-signed certificate")
- def assert_my_credential (self):
- return self.assert_filename (self.my_credential_filename(),"user's credential")
- def assert_my_gid (self):
- return self.assert_filename (self.my_gid_filename(),"user's GID")
+ def assert_private_key(self):
+ return self.assert_filename(self.private_key_filename(), "private key")
+ def assert_self_signed_cert(self):
+ return self.assert_filename(self.self_signed_cert_filename(), "self-signed certificate")
+ def assert_my_credential(self):
+ return self.assert_filename(self.my_credential_filename(), "user's credential")
+ def assert_my_gid(self):
+ return self.assert_filename(self.my_gid_filename(), "user's GID")
# decorator to make up the other methods
- def get_or_produce (filename_method, produce_method, validate_method=None):
+ def get_or_produce(filename_method, produce_method, validate_method=None):
# default validator returns true
- def wrap (f):
- def wrapped (self, *args, **kw):
- filename=filename_method (self, *args, **kw)
- if os.path.isfile ( filename ):
+ def wrap(f):
+ def wrapped(self, *args, **kw):
+ filename = filename_method(self, *args, **kw)
+ if os.path.isfile(filename):
if not validate_method:
return filename
elif validate_method(self, filename):
return filename
else:
# remove invalid file
- self.logger.warning ("Removing %s - has expired"%filename)
+ self.logger.warning("Removing {} - has expired".format(filename))
os.unlink(filename)
try:
- produce_method (self, filename, *args, **kw)
+ produce_method(self, filename, *args, **kw)
return filename
except IOError:
raise
except :
error = sys.exc_info()[:2]
- message="Could not produce/retrieve %s (%s -- %s)"%\
- (filename,error[0],error[1])
+ message = "Could not produce/retrieve {} ({} -- {})"\
+ .format(filename, error[0], error[1])
self.logger.log_exc(message)
- raise Exception, message
+ raise Exception(message)
return wrapped
return wrap
- @get_or_produce (self_signed_cert_filename, self_signed_cert_produce)
- def self_signed_cert (self): pass
+ @get_or_produce(self_signed_cert_filename, self_signed_cert_produce)
+ def self_signed_cert(self): pass
- @get_or_produce (my_credential_filename, my_credential_produce, validate_credential)
- def my_credential (self): pass
+ @get_or_produce(my_credential_filename, my_credential_produce, validate_credential)
+ def my_credential(self): pass
- @get_or_produce (my_gid_filename, my_gid_produce)
- def my_gid (self): pass
+ @get_or_produce(my_gid_filename, my_gid_produce)
+ def my_gid(self): pass
- @get_or_produce (my_pkcs12_filename, my_pkcs12_produce)
- def my_pkcs12 (self): pass
+ @get_or_produce(my_pkcs12_filename, my_pkcs12_produce)
+ def my_pkcs12(self): pass
- @get_or_produce (credential_filename, credential_produce, validate_credential)
- def credential (self, hrn, type): pass
+ @get_or_produce(credential_filename, credential_produce, validate_credential)
+ def credential(self, hrn, type): pass
- @get_or_produce (slice_credential_filename, slice_credential_produce, validate_credential)
- def slice_credential (self, hrn): pass
+ @get_or_produce(slice_credential_filename, slice_credential_produce, validate_credential)
+ def slice_credential(self, hrn): pass
- @get_or_produce (authority_credential_filename, authority_credential_produce, validate_credential)
- def authority_credential (self, hrn): pass
+ @get_or_produce(authority_credential_filename, authority_credential_produce, validate_credential)
+ def authority_credential(self, hrn): pass
- @get_or_produce (gid_filename, gid_produce)
- def gid (self, hrn, type ): pass
+ @get_or_produce(gid_filename, gid_produce)
+ def gid(self, hrn, type): pass
# get the credentials as strings, for inserting as API arguments
- def my_credential_string (self):
+ def my_credential_string(self):
self.my_credential()
return self.plain_read(self.my_credential_filename())
- def slice_credential_string (self, hrn):
+ def slice_credential_string(self, hrn):
self.slice_credential(hrn)
return self.plain_read(self.slice_credential_filename(hrn))
- def authority_credential_string (self, hrn):
+ def authority_credential_string(self, hrn):
self.authority_credential(hrn)
return self.plain_read(self.authority_credential_filename(hrn))
# for consistency
- def private_key (self):
+ def private_key(self):
self.assert_private_key()
return self.private_key_filename()
- def delegate_credential_string (self, original_credential, to_hrn, to_type='authority'):
+ def delegate_credential_string(self, original_credential, to_hrn, to_type='authority'):
"""
sign a delegation credential to someone else
@@ -412,22 +413,23 @@ class SfaClientBootstrap:
and uses Credential.delegate()"""
# the gid and hrn of the object we are delegating
- if isinstance (original_credential, str):
- original_credential = Credential (string=original_credential)
+ if isinstance(original_credential, str):
+ original_credential = Credential(string=original_credential)
original_gid = original_credential.get_gid_object()
original_hrn = original_gid.get_hrn()
if not original_credential.get_privileges().get_all_delegate():
- self.logger.error("delegate_credential_string: original credential %s does not have delegate bit set"%original_hrn)
+ self.logger.error("delegate_credential_string: original credential {} does not have delegate bit set"
+ .format(original_hrn))
return
# the delegating user's gid
my_gid = self.my_gid()
# retrieve the GID for the entity that we're delegating to
- to_gidfile = self.gid (to_hrn,to_type)
-# to_gid = GID ( to_gidfile )
+ to_gidfile = self.gid(to_hrn, to_type)
+# to_gid = GID(to_gidfile)
# to_hrn = delegee_gid.get_hrn()
-# print 'to_hrn',to_hrn
+# print 'to_hrn', to_hrn
delegated_credential = original_credential.delegate(to_gidfile, self.private_key(), my_gid)
return delegated_credential.save_to_string(save_parents=True)
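
The get_or_produce decorator reworked above implements a small file-backed cache: when the target file already exists (and, if a validator is given, still validates) its name is returned as-is, otherwise the produce method is invoked to (re)create it. A minimal standalone sketch of the same pattern, with hypothetical value_filename/value_produce helpers in place of the SFA ones:

import os

def get_or_produce(filename_method, produce_method, validate_method=None):
    # decorator factory: turn a stub method into "return cached file, producing it on demand"
    def wrap(f):
        def wrapped(self, *args, **kw):
            filename = filename_method(self, *args, **kw)
            if os.path.isfile(filename):
                if validate_method is None or validate_method(self, filename):
                    return filename
                os.unlink(filename)                 # stale entry: drop and re-produce
            produce_method(self, filename, *args, **kw)
            return filename
        return wrapped
    return wrap

class ExampleCache:
    # hypothetical helpers, for illustration only
    def value_filename(self):
        return "/tmp/example-value.txt"
    def value_produce(self, filename):
        with open(filename, "w") as out:
            out.write("expensive result\n")
    @get_or_produce(value_filename, value_produce)
    def value(self):
        pass

# ExampleCache().value() returns "/tmp/example-value.txt", creating the file only on first use.
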
diff --git a/sfa/client/sfascan.py b/sfa/client/sfascan.py
index f7f5dda..136835d 100644
--- a/sfa/client/sfascan.py
+++ b/sfa/client/sfascan.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import sys, os.path
import pickle
import time
@@ -8,7 +10,7 @@ from urlparse import urlparse
try:
import pygraphviz
except:
- print 'Warning, could not import pygraphviz, test mode only'
+ print('Warning, could not import pygraphviz, test mode only')
from optparse import OptionParser
@@ -49,7 +51,7 @@ class VersionCache:
def load (self):
try:
- infile=file(self.filename,'r')
+ infile=open(self.filename,'r')
self.url2version=pickle.load(infile)
infile.close()
except:
@@ -59,7 +61,7 @@ class VersionCache:
def save (self):
try:
- outfile=file(self.filename,'w')
+ outfile=open(self.filename,'w')
pickle.dump(self.url2version,outfile)
outfile.close()
except:
@@ -73,7 +75,7 @@ class VersionCache:
def show (self):
entries=len(self.url2version)
- print "version cache from file %s has %d entries"%(self.filename,entries)
+ print("version cache from file %s has %d entries"%(self.filename,entries))
key_values=self.url2version.items()
def old_first (kv1,kv2): return int(kv1[1][0]-kv2[1][0])
key_values.sort(old_first)
@@ -82,9 +84,9 @@ class VersionCache:
(timestamp,version) = tuple
how_old = time.time()-timestamp
if how_old<=self.expires:
- print url,"-- %d seconds ago"%how_old
+ print(url,"-- %d seconds ago"%how_old)
else:
- print "OUTDATED",url,"(%d seconds ago, expires=%d)"%(how_old,self.expires)
+ print("OUTDATED",url,"(%d seconds ago, expires=%d)"%(how_old,self.expires))
# turns out we might have trailing slashes or not
def normalize (self, url):
@@ -366,5 +368,5 @@ class SfaScan:
# test mode when pygraphviz is not available
except:
entry=entries[0]
- print "GetVersion at %s returned %s"%(entry.url(),entry.get_version())
+ print("GetVersion at %s returned %s"%(entry.url(),entry.get_version()))
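
The VersionCache hunks above only replace the removed file() builtin with open(); the class itself is a pickle-backed cache keyed by URL, each entry holding a (timestamp, version) pair that is treated as stale once older than an expiry delay. A rough standalone sketch of that idea, using hypothetical names and not tied to sfascan:

import os, pickle, time

class SimpleVersionCache:
    # hypothetical pickle-backed cache: url -> (timestamp, value)
    def __init__(self, filename, expires=60*60):
        self.filename = filename
        self.expires = expires
        self.url2version = {}
        if os.path.isfile(filename):
            with open(filename, 'rb') as infile:
                self.url2version = pickle.load(infile)

    def save(self):
        with open(self.filename, 'wb') as outfile:
            pickle.dump(self.url2version, outfile)

    def get(self, url):
        entry = self.url2version.get(url)
        if entry is None:
            return None
        timestamp, value = entry
        if time.time() - timestamp > self.expires:
            return None                             # expired: caller should refresh and set() again
        return value

    def set(self, url, value):
        self.url2version[url] = (time.time(), value)
        self.save()
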
diff --git a/sfa/client/sfaserverproxy.py b/sfa/client/sfaserverproxy.py
index d326b3a..e281f66 100644
--- a/sfa/client/sfaserverproxy.py
+++ b/sfa/client/sfaserverproxy.py
@@ -5,8 +5,8 @@ import ssl
try: turn_off_server_verify = { 'context' : ssl._create_unverified_context() }
except: turn_off_server_verify = {}
-import xmlrpclib
-from httplib import HTTPS, HTTPSConnection
+from sfa.util.py23 import xmlrpc_client
+from sfa.util.py23 import http_client
try:
from sfa.util.sfalogging import logger
@@ -23,11 +23,11 @@ except:
class ServerException(Exception):
pass
-class ExceptionUnmarshaller(xmlrpclib.Unmarshaller):
+class ExceptionUnmarshaller(xmlrpc_client.Unmarshaller):
def close(self):
try:
- return xmlrpclib.Unmarshaller.close(self)
- except xmlrpclib.Fault, e:
+ return xmlrpc_client.Unmarshaller.close(self)
+ except xmlrpc_client.Fault as e:
raise ServerException(e.faultString)
##
@@ -37,10 +37,10 @@ class ExceptionUnmarshaller(xmlrpclib.Unmarshaller):
# targeting only python-2.7 we can get rid of some older code
-class XMLRPCTransport(xmlrpclib.Transport):
+class XMLRPCTransport(xmlrpc_client.Transport):
def __init__(self, key_file = None, cert_file = None, timeout = None):
- xmlrpclib.Transport.__init__(self)
+ xmlrpc_client.Transport.__init__(self)
self.timeout=timeout
self.key_file = key_file
self.cert_file = cert_file
@@ -49,9 +49,9 @@ class XMLRPCTransport(xmlrpclib.Transport):
# create a HTTPS connection object from a host descriptor
# host may be a string, or a (host, x509-dict) tuple
host, extra_headers, x509 = self.get_host_info(host)
- conn = HTTPSConnection(host, None, key_file = self.key_file,
- cert_file = self.cert_file,
- **turn_off_server_verify)
+ conn = http_client.HTTPSConnection(host, None, key_file = self.key_file,
+ cert_file = self.cert_file,
+ **turn_off_server_verify)
# Some logic to deal with timeouts. It appears that some (or all) versions
# of python don't set the timeout after the socket is created. We'll do it
@@ -74,21 +74,21 @@ class XMLRPCTransport(xmlrpclib.Transport):
def getparser(self):
unmarshaller = ExceptionUnmarshaller()
- parser = xmlrpclib.ExpatParser(unmarshaller)
+ parser = xmlrpc_client.ExpatParser(unmarshaller)
return parser, unmarshaller
-class XMLRPCServerProxy(xmlrpclib.ServerProxy):
+class XMLRPCServerProxy(xmlrpc_client.ServerProxy):
def __init__(self, url, transport, allow_none=True, verbose=False):
# remember url for GetVersion
# xxx not sure this is still needed as SfaServerProxy has this too
- self.url=url
- xmlrpclib.ServerProxy.__init__(self, url, transport, allow_none=allow_none,
- verbose=verbose,
- **turn_off_server_verify)
+ self.url = url
+ xmlrpc_client.ServerProxy.__init__(self, url, transport, allow_none=allow_none,
+ verbose=verbose,
+ **turn_off_server_verify)
def __getattr__(self, attr):
- logger.debug ("xml-rpc %s method:%s" % (self.url, attr))
- return xmlrpclib.ServerProxy.__getattr__(self, attr)
+ logger.debug("xml-rpc %s method:%s" % (self.url, attr))
+ return xmlrpc_client.ServerProxy.__getattr__(self, attr)
########## the object on which we can send methods that get sent over xmlrpc
class SfaServerProxy:
@@ -99,7 +99,7 @@ class SfaServerProxy:
self.certfile = certfile
self.verbose = verbose
self.timeout = timeout
- # an instance of xmlrpclib.ServerProxy
+ # an instance of xmlrpc_client.ServerProxy
transport = XMLRPCTransport(keyfile, certfile, timeout)
self.serverproxy = XMLRPCServerProxy(url, transport, allow_none=True, verbose=verbose)
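
The switch from xmlrpclib/httplib to sfa.util.py23 above follows the usual Python 2/3 compatibility pattern: a small shim module tries the Python 3 module names first and falls back to the Python 2 ones, so the rest of the code imports a single stable name. The actual content of sfa/util/py23 is not shown in this diff; a plausible sketch of such a shim would be:

# hypothetical compatibility shim in the spirit of sfa.util.py23 (the real module may differ)
try:                                        # Python 3
    import xmlrpc.client as xmlrpc_client
    import http.client as http_client
    from io import StringIO
except ImportError:                         # Python 2
    import xmlrpclib as xmlrpc_client
    import httplib as http_client
    from StringIO import StringIO
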
diff --git a/sfa/client/sfi.py b/sfa/client/sfi.py
index 6887250..95acf18 100644
--- a/sfa/client/sfi.py
+++ b/sfa/client/sfi.py
@@ -17,7 +17,6 @@ import pickle
import json
import shutil
from lxml import etree
-from StringIO import StringIO
from optparse import OptionParser
from pprint import PrettyPrinter
from tempfile import mkstemp
@@ -34,6 +33,7 @@ from sfa.util.config import Config
from sfa.util.version import version_core
from sfa.util.cache import Cache
from sfa.util.printable import printable
+from sfa.util.py23 import StringIO
from sfa.storage.record import Record
@@ -98,7 +98,7 @@ def filter_records(type, records):
return filtered_records
-def credential_printable (cred):
+def credential_printable(cred):
credential = Credential(cred=cred)
result = ""
result += credential.pretty_cred()
@@ -109,8 +109,8 @@ def credential_printable (cred):
result += "rights={}\n".format(rights)
return result
-def show_credentials (cred_s):
- if not isinstance (cred_s, list): cred_s = [cred_s]
+def show_credentials(cred_s):
+ if not isinstance(cred_s, list): cred_s = [cred_s]
for cred in cred_s:
print("Using Credential {}".format(credential_printable(cred)))
@@ -178,12 +178,12 @@ def save_records_to_file(filename, record_dicts, format="xml"):
print("unknown output format", format)
# minimally check a key argument
-def check_ssh_key (key):
+def check_ssh_key(key):
good_ssh_key = r'^.*(?:ssh-dss|ssh-rsa)[ ]+[A-Za-z0-9+/=]+(?: .*)?$'
return re.match(good_ssh_key, key, re.IGNORECASE)
# load methods
-def normalize_type (type):
+def normalize_type(type):
if type.startswith('au'):
return 'authority'
elif type.startswith('us'):
@@ -215,7 +215,7 @@ def load_record_from_opts(options):
pubkey = open(options.key, 'r').read()
except IOError:
pubkey = options.key
- if not check_ssh_key (pubkey):
+ if not check_ssh_key(pubkey):
raise SfaInvalidArgument(name='key', msg="Could not find file, or wrong key format")
record_dict['reg-keys'] = [pubkey]
if hasattr(options, 'slices') and options.slices:
@@ -259,7 +259,7 @@ from functools import wraps
commands_list=[]
commands_dict={}
-def declare_command (args_string, example, aliases=None):
+def declare_command(args_string, example, aliases=None):
def wrap(m):
name=getattr(m, '__name__')
doc=getattr(m, '__doc__', "-- missing doc --")
@@ -273,12 +273,12 @@ def declare_command (args_string, example, aliases=None):
commands_list.append(alias)
commands_dict[alias]=command_tuple
@wraps(m)
- def new_method (*args, **kwds): return m(*args, **kwds)
+ def new_method(*args, **kwds): return m(*args, **kwds)
return new_method
return wrap
-def remove_none_fields (record):
+def remove_none_fields(record):
none_fields=[ k for (k, v) in record.items() if v is None ]
for k in none_fields: del record[k]
@@ -290,7 +290,7 @@ class Sfi:
required_options=['verbose', 'debug', 'registry', 'sm', 'auth', 'user', 'user_private_key']
@staticmethod
- def default_sfi_dir ():
+ def default_sfi_dir():
if os.path.isfile("./sfi_config"):
return os.getcwd()
else:
@@ -301,7 +301,7 @@ class Sfi:
class DummyOptions:
pass
- def __init__ (self, options=None):
+ def __init__(self, options=None):
if options is None: options=Sfi.DummyOptions()
for opt in Sfi.required_options:
if not hasattr(options, opt):
@@ -321,7 +321,7 @@ class Sfi:
self.client_bootstrap = None
### suitable if no reasonable command has been provided
- def print_commands_help (self, options):
+ def print_commands_help(self, options):
verbose = getattr(options, 'verbose')
format3 = "%10s %-35s %s"
format3offset = 47
@@ -350,7 +350,7 @@ class Sfi:
print(format3 % (command, "<<alias for %s>>"%canonical, ""))
### now if a known command was found we can be more verbose on that one
- def print_help (self):
+ def print_help(self):
print("==================== Generic sfi usage")
self.sfi_parser.print_help()
(doc, _, example, canonical) = commands_dict[self.command]
@@ -422,7 +422,7 @@ class Sfi:
parser = OptionParser(add_help_option=False,
usage="sfi [sfi_options] {} [cmd_options] {}"\
.format(command, args_string))
- parser.add_option ("-h","--help",dest='help',action='store_true',default=False,
+ parser.add_option("-h","--help",dest='help',action='store_true',default=False,
help="Summary of one command usage")
if canonical in ("config"):
@@ -434,7 +434,7 @@ class Sfi:
action="store_true", dest="version_local", default=False,
help="display version of the local client")
- if canonical in ("version", "trusted"):
+ if canonical in ("version", "trusted", "introspect"):
parser.add_option("-R","--registry_interface",
action="store_true", dest="registry_interface", default=False,
help="target the registry interface instead of slice interface")
@@ -453,7 +453,7 @@ class Sfi:
callback=optparse_listvalue_callback)
parser.add_option('-p', '--pis', dest='reg_pis', metavar='<PIs>', help='Set/replace Principal Investigators/Project Managers',
default='', type="str", action='callback', callback=optparse_listvalue_callback)
- parser.add_option ('-X','--extra',dest='extras',default={},type='str',metavar="<EXTRA_ASSIGNS>",
+ parser.add_option('-X','--extra',dest='extras',default={},type='str',metavar="<EXTRA_ASSIGNS>",
action="callback", callback=optparse_dictvalue_callback, nargs=1,
help="set extra/testbed-dependent flags, e.g. --extra enabled=true")
@@ -579,7 +579,7 @@ use this if you mean an authority instead""")
return -1
# complete / find unique match with command set
- command_candidates = Candidates (commands_list)
+ command_candidates = Candidates(commands_list)
input = args[0]
command = command_candidates.only_match(input)
if not command:
@@ -600,8 +600,8 @@ use this if you mean an authority instead""")
if not command_options.type:
sys.exit(1)
- self.read_config ()
- self.bootstrap ()
+ self.read_config()
+ self.bootstrap()
self.logger.debug("Command={}".format(self.command))
try:
@@ -609,7 +609,7 @@ use this if you mean an authority instead""")
except SystemExit:
return 1
except:
- self.logger.log_exc ("sfi command {} failed".format(command))
+ self.logger.log_exc("sfi command {} failed".format(command))
return 1
return retcod
@@ -619,7 +619,7 @@ use this if you mean an authority instead""")
shell_config_file = os.path.join(self.options.sfi_dir, "sfi_config.sh")
try:
if Config.is_ini(config_file):
- config = Config (config_file)
+ config = Config(config_file)
else:
# try upgrading from shell config format
fp, fn = mkstemp(suffix='sfi_config', text=True)
@@ -703,25 +703,25 @@ use this if you mean an authority instead""")
#
# init self-signed cert, user credentials and gid
- def bootstrap (self):
+ def bootstrap(self):
if self.options.verbose:
self.logger.info("Initializing SfaClientBootstrap with {}".format(self.reg_url))
- client_bootstrap = SfaClientBootstrap (self.user, self.reg_url, self.options.sfi_dir,
+ client_bootstrap = SfaClientBootstrap(self.user, self.reg_url, self.options.sfi_dir,
logger=self.logger)
# if -k is provided, use this to initialize private key
if self.options.user_private_key:
- client_bootstrap.init_private_key_if_missing (self.options.user_private_key)
+ client_bootstrap.init_private_key_if_missing(self.options.user_private_key)
else:
# trigger legacy compat code if needed
# the name has changed from just <leaf>.pkey to <hrn>.pkey
if not os.path.isfile(client_bootstrap.private_key_filename()):
- self.logger.info ("private key not found, trying legacy name")
+ self.logger.info("private key not found, trying legacy name")
try:
- legacy_private_key = os.path.join (self.options.sfi_dir, "{}.pkey"
+ legacy_private_key = os.path.join(self.options.sfi_dir, "{}.pkey"
.format(Xrn.unescape(get_leaf(self.user))))
self.logger.debug("legacy_private_key={}"
.format(legacy_private_key))
- client_bootstrap.init_private_key_if_missing (legacy_private_key)
+ client_bootstrap.init_private_key_if_missing(legacy_private_key)
self.logger.info("Copied private key from legacy location {}"
.format(legacy_private_key))
except:
@@ -732,11 +732,11 @@ use this if you mean an authority instead""")
client_bootstrap.bootstrap_my_gid()
# extract what's needed
self.private_key = client_bootstrap.private_key()
- self.my_credential_string = client_bootstrap.my_credential_string ()
+ self.my_credential_string = client_bootstrap.my_credential_string()
self.my_credential = {'geni_type': 'geni_sfa',
'geni_version': '3',
'geni_value': self.my_credential_string}
- self.my_gid = client_bootstrap.my_gid ()
+ self.my_gid = client_bootstrap.my_gid()
self.client_bootstrap = client_bootstrap
@@ -744,13 +744,13 @@ use this if you mean an authority instead""")
if not self.authority:
self.logger.critical("no authority specified. Use -a or set SF_AUTH")
sys.exit(-1)
- return self.client_bootstrap.authority_credential_string (self.authority)
+ return self.client_bootstrap.authority_credential_string(self.authority)
def authority_credential_string(self, auth_hrn):
- return self.client_bootstrap.authority_credential_string (auth_hrn)
+ return self.client_bootstrap.authority_credential_string(auth_hrn)
def slice_credential_string(self, name):
- return self.client_bootstrap.slice_credential_string (name)
+ return self.client_bootstrap.slice_credential_string(name)
def slice_credential(self, name):
return {'geni_type': 'geni_sfa',
@@ -783,18 +783,18 @@ use this if you mean an authority instead""")
# Management of the servers
#
- def registry (self):
+ def registry(self):
# cache the result
- if not hasattr (self, 'registry_proxy'):
+ if not hasattr(self, 'registry_proxy'):
self.logger.info("Contacting Registry at: {}".format(self.reg_url))
self.registry_proxy \
= SfaServerProxy(self.reg_url, self.private_key, self.my_gid,
timeout=self.options.timeout, verbose=self.options.debug)
return self.registry_proxy
- def sliceapi (self):
+ def sliceapi(self):
# cache the result
- if not hasattr (self, 'sliceapi_proxy'):
+ if not hasattr(self, 'sliceapi_proxy'):
# if the command exposes the --component option, figure it's hostname and connect at CM_PORT
if hasattr(self.command_options, 'component') and self.command_options.component:
# resolve the hrn at the registry
@@ -867,18 +867,18 @@ use this if you mean an authority instead""")
return result
### ois = options if supported
- # to be used in something like serverproxy.Method (arg1, arg2, *self.ois(api_options))
- def ois (self, server, option_dict):
- if self.server_supports_options_arg (server):
+ # to be used in something like serverproxy.Method(arg1, arg2, *self.ois(api_options))
+ def ois(self, server, option_dict):
+ if self.server_supports_options_arg(server):
return [option_dict]
- elif self.server_supports_call_id_arg (server):
- return [ unique_call_id () ]
+ elif self.server_supports_call_id_arg(server):
+ return [ unique_call_id() ]
else:
return []
### cis = call_id if supported - like ois
- def cis (self, server):
- if self.server_supports_call_id_arg (server):
+ def cis(self, server):
+ if self.server_supports_call_id_arg(server):
return [ unique_call_id ]
else:
return []
@@ -909,7 +909,7 @@ use this if you mean an authority instead""")
# helper function to analyze raw output
# for main : return 0 if everything is fine, something else otherwise (mostly 1 for now)
- def success (self, raw):
+ def success(self, raw):
return_value = ReturnValue(raw)
output = ReturnValue.get_output(return_value)
# means everything is fine
@@ -926,8 +926,14 @@ use this if you mean an authority instead""")
#==========================================================================
@declare_command("", "")
- def config (self, options, args):
- "Display contents of current config"
+ def config(self, options, args):
+ """
+ Display contents of current config
+ """
+ if len(args) != 0:
+ self.print_help()
+ sys.exit(1)
+
print("# From configuration file {}".format(self.config_file))
flags = [ ('sfi', [ ('registry', 'reg_url'),
('auth', 'authority'),
@@ -936,7 +942,7 @@ use this if you mean an authority instead""")
]),
]
if options.myslice:
- flags.append ( ('myslice', ['backend', 'delegate', 'platform', 'username'] ) )
+ flags.append( ('myslice', ['backend', 'delegate', 'platform', 'username'] ) )
for (section, tuples) in flags:
print("[{}]".format(section))
@@ -955,8 +961,12 @@ use this if you mean an authority instead""")
def version(self, options, args):
"""
display an SFA server version (GetVersion)
- or version information about sfi itself
+ or version information about sfi itself
"""
+ if len(args) != 0:
+ self.print_help()
+ sys.exit(1)
+
if options.version_local:
version = version_core()
else:
@@ -982,6 +992,7 @@ use this if you mean an authority instead""")
if len(args) != 1:
self.print_help()
sys.exit(1)
+
hrn = args[0]
opts = {}
if options.recursive:
@@ -997,7 +1008,7 @@ use this if you mean an authority instead""")
# filter on person, slice, site, node, etc.
# This really should be in the self.filter_records funct def comment...
list = filter_records(options.type, list)
- terminal_render (list, options)
+ terminal_render(list, options)
if options.file:
save_records_to_file(options.file, list, options.fileformat)
# xxx should analyze result
@@ -1011,6 +1022,7 @@ use this if you mean an authority instead""")
if len(args) != 1:
self.print_help()
sys.exit(1)
+
hrn = args[0]
# explicitly require Resolve to run in details mode
resolve_options = {}
@@ -1023,13 +1035,13 @@ use this if you mean an authority instead""")
return
# user has required to focus on some keys
if options.keys:
- def project (record):
+ def project(record):
projected = {}
for key in options.keys:
try: projected[key] = record[key]
except: pass
return projected
- record_dicts = [ project (record) for record in record_dicts ]
+ record_dicts = [ project(record) for record in record_dicts ]
records = [ Record(dict=record_dict) for record_dict in record_dicts ]
for record in records:
if (options.format == "text"): record.dump(sort=True)
@@ -1042,17 +1054,19 @@ use this if you mean an authority instead""")
# this historically was named 'add', it is now 'register' with an alias for legacy
@declare_command("[xml-filename]", "", ['add'])
def register(self, options, args):
- """create new record in registry (Register)
- from command line options (recommended)
- old-school method involving an xml file still supported"""
+ """
+ create new record in registry (Register)
+ from command line options (recommended)
+ old-school method involving an xml file still supported
+ """
+ if len(args) > 1:
+ self.print_help()
+ sys.exit(1)
auth_cred = self.my_authority_credential_string()
if options.show_credential:
show_credentials(auth_cred)
record_dict = {}
- if len(args) > 1:
- self.print_help()
- sys.exit(1)
if len(args) == 1:
try:
record_filepath = args[0]
@@ -1082,11 +1096,17 @@ use this if you mean an authority instead""")
@declare_command("[xml-filename]", "")
def update(self, options, args):
- """update record into registry (Update)
- from command line options (recommended)
- old-school method involving an xml file still supported"""
+ """
+ update record into registry (Update)
+ from command line options (recommended)
+ old-school method involving an xml file still supported
+ """
+ if len(args) > 1:
+ self.print_help()
+ sys.exit(1)
+
record_dict = {}
- if len(args) > 0:
+ if len(args) == 1:
record_filepath = args[0]
rec_file = self.get_record_file(record_filepath)
record_dict.update(load_record_from_file(rec_file).record_to_dict())
@@ -1130,11 +1150,14 @@ use this if you mean an authority instead""")
@declare_command("hrn", "")
def remove(self, options, args):
- "remove registry record by name (Remove)"
+ """
+ remove registry record by name (Remove)
+ """
auth_cred = self.my_authority_credential_string()
if len(args) != 1:
self.print_help()
sys.exit(1)
+
hrn = args[0]
type = options.type
if type in ['all']:
@@ -1157,8 +1180,11 @@ use this if you mean an authority instead""")
"""
discover available resources (ListResources)
"""
- server = self.sliceapi()
+ if len(args) != 0:
+ self.print_help()
+ sys.exit(1)
+ server = self.sliceapi()
# set creds
creds = [self.my_credential]
if options.delegate:
@@ -1182,18 +1208,10 @@ use this if you mean an authority instead""")
api_options['cached'] = False
else:
api_options['cached'] = True
- if options.rspec_version:
- version_manager = VersionManager()
- server_version = self.get_cached_server_version(server)
- if 'sfa' in server_version:
- # just request the version the client wants
- api_options['geni_rspec_version'] = version_manager.get_version(options.rspec_version).to_dict()
- else:
- api_options['geni_rspec_version'] = {'type': options.rspec_version}
- else:
- api_options['geni_rspec_version'] = {'type': 'geni', 'version': '3'}
+ version_manager = VersionManager()
+ api_options['geni_rspec_version'] = version_manager.get_version(options.rspec_version).to_dict()
- list_resources = server.ListResources (creds, api_options)
+ list_resources = server.ListResources(creds, api_options)
value = ReturnValue.get_value(list_resources)
if self.options.raw:
save_raw_to_file(list_resources, self.options.raw, self.options.rawformat, self.options.rawbanner)
@@ -1207,10 +1225,13 @@ use this if you mean an authority instead""")
def describe(self, options, args):
"""
shows currently allocated/provisioned resources
- of the named slice or set of slivers (Describe)
+ of the named slice or set of slivers (Describe)
"""
- server = self.sliceapi()
+ if len(args) != 1:
+ self.print_help()
+ sys.exit(1)
+ server = self.sliceapi()
# set creds
creds = [self.slice_credential(args[0])]
if options.delegate:
@@ -1245,15 +1266,18 @@ use this if you mean an authority instead""")
save_rspec_to_file(value['geni_rspec'], options.file)
if (self.options.raw is None) and (options.file is None):
display_rspec(value['geni_rspec'], options.format)
- return self.success (describe)
+ return self.success(describe)
@declare_command("slice_hrn [<sliver_urn>...]", "")
def delete(self, options, args):
"""
de-allocate and de-provision all or named slivers of the named slice (Delete)
"""
- server = self.sliceapi()
+ if len(args) == 0:
+ self.print_help()
+ sys.exit(1)
+ server = self.sliceapi()
# slice urn
slice_hrn = args[0]
slice_urn = hrn_to_urn(slice_hrn, 'slice')
@@ -1280,18 +1304,19 @@ use this if you mean an authority instead""")
save_raw_to_file(delete, self.options.raw, self.options.rawformat, self.options.rawbanner)
else:
print(value)
- return self.success (delete)
+ return self.success(delete)
@declare_command("slice_hrn rspec", "")
def allocate(self, options, args):
"""
allocate resources to the named slice (Allocate)
"""
- server = self.sliceapi()
- server_version = self.get_cached_server_version(server)
if len(args) != 2:
self.print_help()
sys.exit(1)
+
+ server = self.sliceapi()
+ server_version = self.get_cached_server_version(server)
slice_hrn = args[0]
rspec_file = self.get_rspec_file(args[1])
@@ -1339,7 +1364,7 @@ use this if you mean an authority instead""")
if self.options.raw:
save_raw_to_file(allocate, self.options.raw, self.options.rawformat, self.options.rawbanner)
if options.file is not None:
- save_rspec_to_file (value['geni_rspec'], options.file)
+ save_rspec_to_file(value['geni_rspec'], options.file)
if (self.options.raw is None) and (options.file is None):
print(value)
return self.success(allocate)
@@ -1349,6 +1374,10 @@ use this if you mean an authority instead""")
"""
provision all or named already allocated slivers of the named slice (Provision)
"""
+ if len(args) == 0:
+ self.print_help()
+ sys.exit(1)
+
server = self.sliceapi()
server_version = self.get_cached_server_version(server)
slice_hrn = args[0]
@@ -1404,7 +1433,7 @@ use this if you mean an authority instead""")
if self.options.raw:
save_raw_to_file(provision, self.options.raw, self.options.rawformat, self.options.rawbanner)
if options.file is not None:
- save_rspec_to_file (value['geni_rspec'], options.file)
+ save_rspec_to_file(value['geni_rspec'], options.file)
if (self.options.raw is None) and (options.file is None):
print(value)
return self.success(provision)
@@ -1414,8 +1443,11 @@ use this if you mean an authority instead""")
"""
retrieve the status of the slivers belonging to the named slice (Status)
"""
- server = self.sliceapi()
+ if len(args) != 1:
+ self.print_help()
+ sys.exit(1)
+ server = self.sliceapi()
# slice urn
slice_hrn = args[0]
slice_urn = hrn_to_urn(slice_hrn, 'slice')
@@ -1435,13 +1467,17 @@ use this if you mean an authority instead""")
save_raw_to_file(status, self.options.raw, self.options.rawformat, self.options.rawbanner)
else:
print(value)
- return self.success (status)
+ return self.success(status)
@declare_command("slice_hrn [<sliver_urn>...] action", "")
def action(self, options, args):
"""
Perform the named operational action on all or named slivers of the named slice
"""
+ if len(args) == 0:
+ self.print_help()
+ sys.exit(1)
+
server = self.sliceapi()
api_options = {}
# slice urn
@@ -1467,7 +1503,7 @@ use this if you mean an authority instead""")
save_raw_to_file(perform_action, self.options.raw, self.options.rawformat, self.options.rawbanner)
else:
print(value)
- return self.success (perform_action)
+ return self.success(perform_action)
@declare_command("slice_hrn [<sliver_urn>...] time",
"\n".join(["sfi renew onelab.ple.heartbeat 2015-04-31",
@@ -1477,12 +1513,13 @@ use this if you mean an authority instead""")
"sfi renew onelab.ple.heartbeat +2m",]))
def renew(self, options, args):
"""
- renew slice (Renew)
+ renew slice (Renew)
"""
- server = self.sliceapi()
if len(args) < 2:
self.print_help()
sys.exit(1)
+
+ server = self.sliceapi()
slice_hrn = args[0]
slice_urn = Xrn(slice_hrn, type='slice').get_urn()
@@ -1518,6 +1555,10 @@ use this if you mean an authority instead""")
"""
shutdown named slice (Shutdown)
"""
+ if len(args) != 1:
+ self.print_help()
+ sys.exit(1)
+
server = self.sliceapi()
# slice urn
slice_hrn = args[0]
@@ -1531,7 +1572,7 @@ use this if you mean an authority instead""")
save_raw_to_file(shutdown, self.options.raw, self.options.rawformat, self.options.rawbanner)
else:
print(value)
- return self.success (shutdown)
+ return self.success(shutdown)
@declare_command("[name]", "")
def gid(self, options, args):
@@ -1541,6 +1582,7 @@ use this if you mean an authority instead""")
if len(args) < 1:
self.print_help()
sys.exit(1)
+
target_hrn = args[0]
my_gid_string = open(self.client_bootstrap.my_gid()).read()
gid = self.registry().CreateGid(self.my_credential_string, target_hrn, my_gid_string)
@@ -1567,7 +1609,7 @@ use this if you mean an authority instead""")
because of the two -s options
""")
- def delegate (self, options, args):
+ def delegate(self, options, args):
"""
(locally) create delegate credential for use by given hrn
make sure to check for 'sfi myslice' instead if you plan
@@ -1576,6 +1618,7 @@ use this if you mean an authority instead""")
if len(args) != 1:
self.print_help()
sys.exit(1)
+
to_hrn = args[0]
# support for several delegations in the same call
# so first we gather the things to do
@@ -1583,16 +1626,16 @@ use this if you mean an authority instead""")
for slice_hrn in options.delegate_slices:
message = "{}.slice".format(slice_hrn)
original = self.slice_credential_string(slice_hrn)
- tuples.append ( (message, original,) )
+ tuples.append( (message, original,) )
if options.delegate_pi:
my_authority = self.authority
message = "{}.pi".format(my_authority)
original = self.my_authority_credential_string()
- tuples.append ( (message, original,) )
+ tuples.append( (message, original,) )
for auth_hrn in options.delegate_auths:
message = "{}.auth".format(auth_hrn)
original = self.authority_credential_string(auth_hrn)
- tuples.append ( (message, original, ) )
+ tuples.append( (message, original, ) )
# if nothing was specified at all at this point, let's assume -u
if not tuples:
options.delegate_user = True
@@ -1600,7 +1643,7 @@ use this if you mean an authority instead""")
if options.delegate_user:
message = "{}.user".format(self.user)
original = self.my_credential_string
- tuples.append ( (message, original, ) )
+ tuples.append( (message, original, ) )
# default type for beneficial is user unless -A
to_type = 'authority' if options.delegate_to_authority else 'user'
@@ -1609,7 +1652,7 @@ use this if you mean an authority instead""")
# it's all in the filenaming scheme
for (message, original) in tuples:
delegated_string = self.client_bootstrap.delegate_credential_string(original, to_hrn, to_type)
- delegated_credential = Credential (string=delegated_string)
+ delegated_credential = Credential(string=delegated_string)
filename = os.path.join(self.options.sfi_dir,
"{}_for_{}.{}.cred".format(message, to_hrn, to_type))
delegated_credential.save_to_file(filename, save_parents=True)
@@ -1642,17 +1685,18 @@ $ sfi m -b http://mymanifold.foo.com:7080/
and uses a custom backend for this one call
"""
) # declare_command
- def myslice (self, options, args):
+ def myslice(self, options, args):
""" This helper is for refreshing your credentials at myslice; it will
* compute all the slices that you currently have credentials on
* refresh all your credentials (you as a user and pi, your slices)
* upload them to the manifold backend server
for last phase, sfi_config is read to look for the [myslice] section,
- and namely the 'backend', 'delegate' and 'user' settings"""
+ specifically the 'backend', 'delegate' and 'user' settings
+ """
##########
- if len(args)>0:
+ if len(args) > 0:
self.print_help()
sys.exit(1)
# enable info by default
@@ -1710,11 +1754,15 @@ $ sfi m -b http://mymanifold.foo.com:7080/
# (d) make sure we have *valid* credentials for all these
hrn_credentials = []
- hrn_credentials.append ( (self.user, 'user', self.my_credential_string,) )
+ hrn_credentials.append( (self.user, 'user', self.my_credential_string,) )
for auth_hrn in my_auths:
- hrn_credentials.append ( (auth_hrn, 'auth', self.authority_credential_string(auth_hrn),) )
+ hrn_credentials.append( (auth_hrn, 'auth', self.authority_credential_string(auth_hrn),) )
for slice_hrn in my_slices:
- hrn_credentials.append ( (slice_hrn, 'slice', self.slice_credential_string (slice_hrn),) )
+ try:
+ hrn_credentials.append( (slice_hrn, 'slice', self.slice_credential_string(slice_hrn),) )
+ except:
+ print("WARNING: could not get slice credential for slice {}"
+ .format(slice_hrn))
# (e) check for the delegated version of these
# xxx todo add an option -a/-A? like for 'sfi delegate' for when we ever
@@ -1723,21 +1771,21 @@ $ sfi m -b http://mymanifold.foo.com:7080/
delegatee_hrn = myslice_dict['delegate']
hrn_delegated_credentials = []
for (hrn, htype, credential) in hrn_credentials:
- delegated_credential = self.client_bootstrap.delegate_credential_string (credential, delegatee_hrn, delegatee_type)
+ delegated_credential = self.client_bootstrap.delegate_credential_string(credential, delegatee_hrn, delegatee_type)
# save these so user can monitor what she's uploaded
- filename = os.path.join ( self.options.sfi_dir,
+ filename = os.path.join( self.options.sfi_dir,
"{}.{}_for_{}.{}.cred"\
.format(hrn, htype, delegatee_hrn, delegatee_type))
- with file(filename, 'w') as f:
+ with open(filename, 'w') as f:
f.write(delegated_credential)
self.logger.debug("(Over)wrote {}".format(filename))
- hrn_delegated_credentials.append ((hrn, htype, delegated_credential, filename, ))
+ hrn_delegated_credentials.append((hrn, htype, delegated_credential, filename, ))
# (f) and finally upload them to manifold server
# xxx todo add an option so the password can be set on the command line
# (but *NOT* in the config file) so other apps can leverage this
self.logger.info("Uploading on backend at {}".format(myslice_dict['backend']))
- uploader = ManifoldUploader (logger=self.logger,
+ uploader = ManifoldUploader(logger=self.logger,
url=myslice_dict['backend'],
platform=myslice_dict['platform'],
username=myslice_dict['username'],
@@ -1787,3 +1835,31 @@ $ sfi m -b http://mymanifold.foo.com:7080/
print("Certificate:\n{}\n\n".format(trusted_cert))
# xxx should analyze result
return 0
+
+ @declare_command("", "")
+ def introspect(self, options, args):
+ """
+ If the remote server supports the XML-RPC introspection API,
+ list the methods it supports
+ """
+ if options.registry_interface:
+ server = self.registry()
+ else:
+ server = self.sliceapi()
+ results = server.serverproxy.system.listMethods()
+ # at first sight a list here means it's fine,
+ # and a dict suggests an error (no support for introspection?)
+ if isinstance(results, list):
+ results = [ name for name in results if 'system.' not in name ]
+ results.sort()
+ print("== methods supported at {}".format(server.url))
+ if 'Discover' in results:
+ print("== has support for 'Discover' - most likely a v3")
+ else:
+ print("== has no support for 'Discover' - most likely a v2")
+ for name in results:
+ print(name)
+ else:
+ print("Got return of type {}, expected a list".format(type(results)))
+ print("This suggests the remote end does not support introspection")
+ print(results)
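
The new introspect command relies on the standard XML-RPC introspection API: servers that enable it expose system.listMethods() (plus system.methodSignature() and system.methodHelp()). A minimal standalone example against a hypothetical endpoint, independent of sfi's proxies:

from __future__ import print_function
try:                                        # Python 3
    from xmlrpc.client import ServerProxy
except ImportError:                         # Python 2
    from xmlrpclib import ServerProxy

# hypothetical URL, for illustration only
server = ServerProxy("http://registry.example.org:12345/")
methods = server.system.listMethods()
# hide the introspection methods themselves, like the command above does
for name in sorted(m for m in methods if not m.startswith('system.')):
    print(name)
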
diff --git a/sfa/dummy/dummy_testbed_api_client.py b/sfa/dummy/dummy_testbed_api_client.py
index 38ba0d3..57a0482 100644
--- a/sfa/dummy/dummy_testbed_api_client.py
+++ b/sfa/dummy/dummy_testbed_api_client.py
@@ -1,9 +1,11 @@
-import xmlrpclib
+from __future__ import print_function
+
from datetime import datetime
import time
+from sfa.util.py23 import xmlrpc_client
dummy_url = "http://localhost:8080"
-dummy_api = xmlrpclib.ServerProxy(dummy_url)
+dummy_api = xmlrpc_client.ServerProxy(dummy_url)
# Add a user:
my_user_id = dummy_api.AddUser({'email': 'john.doe@test.net', 'user_name': 'john.doe', 'keys': ['copy here your ssh-rsa public key']})
@@ -11,6 +13,6 @@ my_user_id = dummy_api.AddUser({'email': 'john.doe@test.net', 'user_name': 'john
dummy_api.AddUserToSlice({'slice_id': 2, 'user_id': my_user_id})
# Display the list of users
-print dummy_api.GetUsers()
+print(dummy_api.GetUsers())
# Display the list of slices
-print dummy_api.GetSlices()
+print(dummy_api.GetSlices())
diff --git a/sfa/dummy/dummyshell.py b/sfa/dummy/dummyshell.py
index c6cc833..67007d7 100644
--- a/sfa/dummy/dummyshell.py
+++ b/sfa/dummy/dummyshell.py
@@ -1,9 +1,9 @@
import sys
-import xmlrpclib
import socket
from urlparse import urlparse
from sfa.util.sfalogging import logger
+from sfa.util.py23 import xmlrpc_client
class DummyShell:
"""
@@ -21,12 +21,12 @@ class DummyShell:
def __init__ ( self, config ) :
url = config.SFA_DUMMY_URL
- self.proxy = xmlrpclib.Server(url, verbose = False, allow_none = True)
+ self.proxy = xmlrpc_client.ServerProxy(url, verbose = False, allow_none = True)
def __getattr__(self, name):
def func(*args, **kwds):
if not name in DummyShell.direct_calls:
- raise Exception, "Illegal method call %s for DUMMY driver"%(name)
+ raise Exception("Illegal method call %s for DUMMY driver"%(name))
result=getattr(self.proxy, name)(*args, **kwds)
logger.debug('DummyShell %s returned ... '%(name))
return result
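
The DummyShell change above only rewrites the raise statement in Python 3 syntax; the surrounding pattern is the interesting part: __getattr__ builds a forwarding function on the fly and refuses any method name that is not whitelisted in direct_calls. A small sketch of the same idea with hypothetical names:

class WhitelistedProxy:
    # hypothetical thin proxy that only forwards whitelisted remote calls
    direct_calls = ['GetUsers', 'GetSlices']

    def __init__(self, proxy):
        self.proxy = proxy

    def __getattr__(self, name):
        def func(*args, **kwds):
            if name not in WhitelistedProxy.direct_calls:
                raise Exception("Illegal method call %s" % name)
            return getattr(self.proxy, name)(*args, **kwds)
        return func
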
diff --git a/sfa/dummy/dummyslices.py b/sfa/dummy/dummyslices.py
index 7ab94ba..394e816 100644
--- a/sfa/dummy/dummyslices.py
+++ b/sfa/dummy/dummyslices.py
@@ -1,5 +1,4 @@
import time
-from types import StringTypes
from collections import defaultdict
from sfa.util.sfatime import utcparse, datetime_to_epoch
diff --git a/sfa/examples/miniclient.py b/sfa/examples/miniclient.py
index 1c625e0..535b446 100755
--- a/sfa/examples/miniclient.py
+++ b/sfa/examples/miniclient.py
@@ -3,6 +3,8 @@
# this is designed to use a totally empty new directory
# so we demonstrate how to bootstrap the whole thing
+from __future__ import print_function
+
# init logging on console
import logging
console = logging.StreamHandler()
@@ -17,7 +19,7 @@ def unique_call_id(): return uuid.uuid4().urn
import sys
args=sys.argv[1:]
if len(args)!=1:
- print "Usage: %s directory"%sys.argv[0]
+ print("Usage: %s directory"%sys.argv[0])
sys.exit(1)
dir=args[0]
logger.debug('sfaclientsample: Using directory %s'%dir)
@@ -62,8 +64,8 @@ def get_version (url):
bootstrap.self_signed_cert()
server_proxy = bootstrap.server_proxy_simple(url)
server_version = server_proxy.GetVersion()
- print "miniclient: GetVersion at %s returned:"%(url)
- for (k,v) in server_version.iteritems(): print "miniclient: \tversion[%s]=%s"%(k,truncate(v))
+ print("miniclient: GetVersion at %s returned:"%(url))
+ for (k,v) in server_version.iteritems(): print("miniclient: \tversion[%s]=%s"%(k,truncate(v)))
# version_dict = {'type': 'SFA', 'version': '1', }
@@ -79,7 +81,7 @@ def list_resources ():
options [ 'geni_rspec_version' ] = version_dict
options [ 'call_id' ] = unique_call_id()
list_resources = bootstrap.server_proxy (aggregate_url).ListResources(credentials,options)
- print "miniclient: ListResources at %s returned : %s"%(aggregate_url,truncate(list_resources))
+ print("miniclient: ListResources at %s returned : %s"%(aggregate_url,truncate(list_resources)))
def list_slice_resources ():
bootstrap.bootstrap_my_gid()
@@ -90,7 +92,7 @@ def list_slice_resources ():
options [ 'geni_slice_urn' ] = slice_urn
options [ 'call_id' ] = unique_call_id()
list_resources = bootstrap.server_proxy (aggregate_url).ListResources(credentials,options)
- print "miniclient: ListResources at %s for slice %s returned : %s"%(aggregate_url,slice_urn,truncate(list_resources))
+ print("miniclient: ListResources at %s for slice %s returned : %s"%(aggregate_url,slice_urn,truncate(list_resources)))
diff --git a/sfa/federica/fdshell.py b/sfa/federica/fdshell.py
index 42ec030..1e7349c 100644
--- a/sfa/federica/fdshell.py
+++ b/sfa/federica/fdshell.py
@@ -1,6 +1,5 @@
-import xmlrpclib
-
from sfa.util.sfalogging import logger
+from sfa.util.py23 import xmlrpc_client
class FdShell:
"""
@@ -20,9 +19,9 @@ class FdShell:
def __init__ ( self, config ) :
url=config.SFA_FEDERICA_URL
# xxx not sure if java xmlrpc has support for None
- # self.proxy = xmlrpclib.Server(url, verbose = False, allow_none = True)
+ # self.proxy = xmlrpc_client.ServerProxy(url, verbose = False, allow_none = True)
# xxx turn on verbosity
- self.proxy = xmlrpclib.Server(url, verbose = True)
+ self.proxy = xmlrpc_client.ServerProxy(url, verbose = True)
# xxx get credentials from the config ?
# right now basic auth data goes into the URL
@@ -30,7 +29,7 @@ class FdShell:
def __getattr__(self, name):
def func(*args, **kwds):
if name not in FdShell.direct_calls:
- raise Exception, "Illegal method call %s for FEDERICA driver"%(name)
+ raise Exception("Illegal method call %s for FEDERICA driver"%(name))
logger.info("Issuing %s args=%s kwds=%s to federica"%\
(name,args,kwds))
# result=getattr(self.proxy, "AggregateManager.%s"%name)(credential, *args, **kwds)
diff --git a/sfa/iotlab/iotlabaggregate.py b/sfa/iotlab/iotlabaggregate.py
index ee95299..a0edf3a 100644
--- a/sfa/iotlab/iotlabaggregate.py
+++ b/sfa/iotlab/iotlabaggregate.py
@@ -59,6 +59,7 @@ class IotLABAggregate(object):
# rspec_node['boot_state'] = 'true'
if node['state'] == 'Absent' or \
node['state'] == 'Suspected' or \
+ node['state'] == 'Dead' or \
node['state'] == 'Busy':
rspec_node['available'] = 'false'
else:
diff --git a/sfa/iotlab/iotlabshell.py b/sfa/iotlab/iotlabshell.py
index c51a7d6..363e4b5 100644
--- a/sfa/iotlab/iotlabshell.py
+++ b/sfa/iotlab/iotlabshell.py
@@ -138,11 +138,6 @@ class IotLABShell(object):
err.reason)
return {'error': err.reason}
for exp in experiments['items']:
- # BUG IN OAR REST API : job with reservation didn't return
- # resources attribute list. We use another request for
- # finding job resources
- exp_nodes = self.api.method('admin/experiments/%d' % exp['id'])
- exp['resources'] = exp_nodes['nodes']
# BUG ASAP jobs without date information
if exp['date'] == "as soon as possible":
exp['date'] = 0
diff --git a/sfa/managers/aggregate_manager_eucalyptus.py b/sfa/managers/aggregate_manager_eucalyptus.py
deleted file mode 100644
index b8c83d8..0000000
--- a/sfa/managers/aggregate_manager_eucalyptus.py
+++ /dev/null
@@ -1,701 +0,0 @@
-from __future__ import with_statement
-
-import sys
-import os, errno
-import logging
-import datetime
-from multiprocessing import Process
-from time import sleep
-
-import boto
-from boto.ec2.regioninfo import RegionInfo
-from boto.exception import EC2ResponseError
-from ConfigParser import ConfigParser
-from xmlbuilder import XMLBuilder
-from lxml import etree as ET
-from sqlobject import *
-
-from sfa.util.faults import InvalidRSpec
-from sfa.util.xrn import urn_to_hrn, Xrn
-from sfa.util.callids import Callids
-#comes with its own logging
-#from sfa.util.sfalogging import logger
-from sfa.util.version import version_core
-
-from sfa.trust.credential import Credential
-
-from sfa.rspecs.version_manager import VersionManager
-from sfa.rspecs.rspec import RSpec
-
-from sfa.planetlab.plaggregate import PlAggregate
-from sfa.planetlab.plslices import PlSlices
-from sfa.planetlab.plxrn import slicename_to_hrn
-
-##
-# Meta data of an instance.
-#
-class Meta(SQLObject):
- instance = SingleJoin('EucaInstance')
- state = StringCol(default = 'new')
- pub_addr = StringCol(default = None)
- pri_addr = StringCol(default = None)
- start_time = DateTimeCol(default = None)
-
-##
-# A representation of an Eucalyptus instance. This is a support class
-# for instance <-> slice mapping.
-#
-class EucaInstance(SQLObject):
- instance_id = StringCol(unique=True, default=None)
- kernel_id = StringCol()
- image_id = StringCol()
- ramdisk_id = StringCol()
- inst_type = StringCol()
- key_pair = StringCol()
- slice = ForeignKey('Slice')
- meta = ForeignKey('Meta')
-
- ##
- # Contacts Eucalyptus and tries to reserve this instance.
- #
- # @param botoConn A connection to Eucalyptus.
- # @param pubKeys A list of public keys for the instance.
- #
- def reserveInstance(self, botoConn, pubKeys):
- logger = logging.getLogger('EucaAggregate')
- logger.info('Reserving an instance: image: %s, kernel: ' \
- '%s, ramdisk: %s, type: %s, key: %s' % \
- (self.image_id, self.kernel_id, self.ramdisk_id,
- self.inst_type, self.key_pair))
-
- try:
- reservation = botoConn.run_instances(self.image_id,
- kernel_id = self.kernel_id,
- ramdisk_id = self.ramdisk_id,
- instance_type = self.inst_type,
- key_name = self.key_pair,
- user_data = pubKeys)
- for instance in reservation.instances:
- self.instance_id = instance.id
-
- # If there is an error, destroy itself.
- except EC2ResponseError, ec2RespErr:
- errTree = ET.fromstring(ec2RespErr.body)
- msg = errTree.find('.//Message')
- logger.error(msg.text)
- self.destroySelf()
-
-##
-# A representation of a PlanetLab slice. This is a support class
-# for instance <-> slice mapping.
-#
-class Slice(SQLObject):
- slice_hrn = StringCol()
- #slice_index = DatabaseIndex('slice_hrn')
- instances = MultipleJoin('EucaInstance')
-
-##
-# A class that builds the RSpec for Eucalyptus.
-#
-class EucaRSpecBuilder(object):
- ##
- # Initizes a RSpec builder
- #
- # @param cloud A dictionary containing data about a
- # cloud (ex. clusters, ip)
- def __init__(self, cloud):
- self.eucaRSpec = XMLBuilder(format = True, tab_step = " ")
- self.cloudInfo = cloud
-
- ##
- # Creates a request stanza.
- #
- # @param num The number of instances to create.
- # @param image The disk image id.
- # @param kernel The kernel image id.
- # @param keypair Key pair to embed.
- # @param ramdisk Ramdisk id (optional).
- #
- def __requestXML(self, num, image, kernel, keypair, ramdisk = ''):
- xml = self.eucaRSpec
- with xml.request:
- with xml.instances:
- xml << str(num)
- with xml.kernel_image(id=kernel):
- xml << ''
- if ramdisk == '':
- with xml.ramdisk:
- xml << ''
- else:
- with xml.ramdisk(id=ramdisk):
- xml << ''
- with xml.disk_image(id=image):
- xml << ''
- with xml.keypair:
- xml << keypair
-
- ##
- # Creates the cluster stanza.
- #
- # @param clusters Clusters information.
- #
- def __clustersXML(self, clusters):
- cloud = self.cloudInfo
- xml = self.eucaRSpec
-
- for cluster in clusters:
- instances = cluster['instances']
- with xml.cluster(id=cluster['name']):
- with xml.ipv4:
- xml << cluster['ip']
- with xml.vm_types:
- for inst in instances:
- with xml.vm_type(name=inst[0]):
- with xml.free_slots:
- xml << str(inst[1])
- with xml.max_instances:
- xml << str(inst[2])
- with xml.cores:
- xml << str(inst[3])
- with xml.memory(unit='MB'):
- xml << str(inst[4])
- with xml.disk_space(unit='GB'):
- xml << str(inst[5])
- if 'instances' in cloud and inst[0] in cloud['instances']:
- existingEucaInstances = cloud['instances'][inst[0]]
- with xml.euca_instances:
- for eucaInst in existingEucaInstances:
- with xml.euca_instance(id=eucaInst['id']):
- with xml.state:
- xml << eucaInst['state']
- with xml.public_dns:
- xml << eucaInst['public_dns']
-
- def __imageBundleXML(self, bundles):
- xml = self.eucaRSpec
- with xml.bundles:
- for bundle in bundles.keys():
- with xml.bundle(id=bundle):
- xml << ''
-
- ##
- # Creates the Images stanza.
- #
- # @param images A list of images in Eucalyptus.
- #
- def __imagesXML(self, images):
- xml = self.eucaRSpec
- with xml.images:
- for image in images:
- with xml.image(id=image.id):
- with xml.type:
- xml << image.type
- with xml.arch:
- xml << image.architecture
- with xml.state:
- xml << image.state
- with xml.location:
- xml << image.location
-
- ##
- # Creates the KeyPairs stanza.
- #
- # @param keypairs A list of key pairs in Eucalyptus.
- #
- def __keyPairsXML(self, keypairs):
- xml = self.eucaRSpec
- with xml.keypairs:
- for key in keypairs:
- with xml.keypair:
- xml << key.name
-
- ##
- # Generates the RSpec.
- #
- def toXML(self):
- logger = logging.getLogger('EucaAggregate')
- if not self.cloudInfo:
- logger.error('No cloud information')
- return ''
-
- xml = self.eucaRSpec
- cloud = self.cloudInfo
- with xml.RSpec(type='eucalyptus'):
- with xml.network(name=cloud['name']):
- with xml.ipv4:
- xml << cloud['ip']
- #self.__keyPairsXML(cloud['keypairs'])
- #self.__imagesXML(cloud['images'])
- self.__imageBundleXML(cloud['imageBundles'])
- self.__clustersXML(cloud['clusters'])
- return str(xml)
-
-##
-# A parser to parse the output of availability-zones.
-#
-# Note: Only one cluster is supported. If more than one, this will
-# not work.
-#
-class ZoneResultParser(object):
- def __init__(self, zones):
- self.zones = zones
-
- def parse(self):
- if len(self.zones) < 3:
- return
- clusterList = []
- cluster = {}
- instList = []
-
- cluster['name'] = self.zones[0].name
- cluster['ip'] = self.zones[0].state
-
- for i in range(2, len(self.zones)):
- currZone = self.zones[i]
- instType = currZone.name.split()[1]
-
- stateString = currZone.state.split('/')
- rscString = stateString[1].split()
-
- instFree = int(stateString[0])
- instMax = int(rscString[0])
- instNumCpu = int(rscString[1])
- instRam = int(rscString[2])
- instDiskSpace = int(rscString[3])
-
- instTuple = (instType, instFree, instMax, instNumCpu, instRam, instDiskSpace)
- instList.append(instTuple)
- cluster['instances'] = instList
- clusterList.append(cluster)
-
- return clusterList
-
-class AggregateManagerEucalyptus:
-
- # The data structure used to represent a cloud.
- # It contains the cloud name, its ip address, image information,
- # key pairs, and clusters information.
- cloud = {}
-
- # The location of the RelaxNG schema.
- EUCALYPTUS_RSPEC_SCHEMA='/etc/sfa/eucalyptus.rng'
-
- _inited=False
-
- # the init_server mechanism has vanished
- def __init__ (self, config):
- if AggregateManagerEucalyptus._inited: return
- AggregateManagerEucalyptus.init_server()
-
- # Initialize the aggregate manager by reading a configuration file.
- @staticmethod
- def init_server():
- logger = logging.getLogger('EucaAggregate')
- fileHandler = logging.FileHandler('/var/log/euca.log')
- fileHandler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
- logger.addHandler(fileHandler)
- fileHandler.setLevel(logging.DEBUG)
- logger.setLevel(logging.DEBUG)
-
- configParser = ConfigParser()
- configParser.read(['/etc/sfa/eucalyptus_aggregate.conf', 'eucalyptus_aggregate.conf'])
- if len(configParser.sections()) < 1:
- logger.error('No cloud defined in the config file')
- raise Exception('Cannot find cloud definition in configuration file.')
-
- # Only read the first section.
- cloudSec = configParser.sections()[0]
- AggregateManagerEucalyptus.cloud['name'] = cloudSec
- AggregateManagerEucalyptus.cloud['access_key'] = configParser.get(cloudSec, 'access_key')
- AggregateManagerEucalyptus.cloud['secret_key'] = configParser.get(cloudSec, 'secret_key')
- AggregateManagerEucalyptus.cloud['cloud_url'] = configParser.get(cloudSec, 'cloud_url')
- cloudURL = AggregateManagerEucalyptus.cloud['cloud_url']
- if cloudURL.find('https://') >= 0:
- cloudURL = cloudURL.replace('https://', '')
- elif cloudURL.find('http://') >= 0:
- cloudURL = cloudURL.replace('http://', '')
- (AggregateManagerEucalyptus.cloud['ip'], parts) = cloudURL.split(':')
-
- # Create image bundles
- images = self.getEucaConnection().get_all_images()
- AggregateManagerEucalyptus.cloud['images'] = images
- AggregateManagerEucalyptus.cloud['imageBundles'] = {}
- for i in images:
- if i.type != 'machine' or i.kernel_id is None: continue
- name = os.path.dirname(i.location)
- detail = {'imageID' : i.id, 'kernelID' : i.kernel_id, 'ramdiskID' : i.ramdisk_id}
- AggregateManagerEucalyptus.cloud['imageBundles'][name] = detail
-
- # Initialize sqlite3 database and tables.
- dbPath = '/etc/sfa/db'
- dbName = 'euca_aggregate.db'
-
- if not os.path.isdir(dbPath):
- logger.info('%s not found. Creating directory ...' % dbPath)
- os.mkdir(dbPath)
-
- conn = connectionForURI('sqlite://%s/%s' % (dbPath, dbName))
- sqlhub.processConnection = conn
- Slice.createTable(ifNotExists=True)
- EucaInstance.createTable(ifNotExists=True)
- Meta.createTable(ifNotExists=True)
-
- # Start the update process to keep track of the meta data
- # about Eucalyptus instance.
- Process(target=AggregateManagerEucalyptus.updateMeta).start()
-
- # Make sure the schema exists.
- if not os.path.exists(AggregateManagerEucalyptus.EUCALYPTUS_RSPEC_SCHEMA):
- err = 'Cannot locate schema at %s' % AggregateManagerEucalyptus.EUCALYPTUS_RSPEC_SCHEMA
- logger.error(err)
- raise Exception(err)
-
- #
- # A separate process that will update the meta data.
- #
- @staticmethod
- def updateMeta():
- logger = logging.getLogger('EucaMeta')
- fileHandler = logging.FileHandler('/var/log/euca_meta.log')
- fileHandler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
- logger.addHandler(fileHandler)
- fileHandler.setLevel(logging.DEBUG)
- logger.setLevel(logging.DEBUG)
-
- while True:
- sleep(30)
-
- # Get IDs of the instances that don't have IPs yet.
- dbResults = Meta.select(
- AND(Meta.q.pri_addr == None,
- Meta.q.state != 'deleted')
- )
- dbResults = list(dbResults)
- logger.debug('[update process] dbResults: %s' % dbResults)
- instids = []
- for r in dbResults:
- if not r.instance:
- continue
- instids.append(r.instance.instance_id)
- logger.debug('[update process] Instance Id: %s' % ', '.join(instids))
-
- # Get instance information from Eucalyptus
- conn = self.getEucaConnection()
- vmInstances = []
- reservations = conn.get_all_instances(instids)
- for reservation in reservations:
- vmInstances += reservation.instances
-
- # Check the IPs
- instIPs = [ {'id':i.id, 'pri_addr':i.private_dns_name, 'pub_addr':i.public_dns_name}
- for i in vmInstances if i.private_dns_name != '0.0.0.0' ]
- logger.debug('[update process] IP dict: %s' % str(instIPs))
-
- # Update the local DB
- for ipData in instIPs:
- dbInst = EucaInstance.select(EucaInstance.q.instance_id == ipData['id']).getOne(None)
- if not dbInst:
- logger.info('[update process] Could not find %s in DB' % ipData['id'])
- continue
- dbInst.meta.pri_addr = ipData['pri_addr']
- dbInst.meta.pub_addr = ipData['pub_addr']
- dbInst.meta.state = 'running'
-
- self.dumpinstanceInfo()
-
- ##
- # Creates a connection to Eucalyptus. This function is inspired by
- # the make_connection() in Euca2ools.
- #
- # @return A connection object or None
- #
- def getEucaConnection():
- accessKey = AggregateManagerEucalyptus.cloud['access_key']
- secretKey = AggregateManagerEucalyptus.cloud['secret_key']
- eucaURL = AggregateManagerEucalyptus.cloud['cloud_url']
- useSSL = False
- srvPath = '/'
- eucaPort = 8773
- logger = logging.getLogger('EucaAggregate')
-
- if not accessKey or not secretKey or not eucaURL:
- logger.error('Please set ALL of the required environment ' \
- 'variables by sourcing the eucarc file.')
- return None
-
- # Split the url into parts
- if eucaURL.find('https://') >= 0:
- useSSL = True
- eucaURL = eucaURL.replace('https://', '')
- elif eucaURL.find('http://') >= 0:
- useSSL = False
- eucaURL = eucaURL.replace('http://', '')
- (eucaHost, parts) = eucaURL.split(':')
- if len(parts) > 1:
- parts = parts.split('/')
- eucaPort = int(parts[0])
- parts = parts[1:]
- srvPath = '/'.join(parts)
-
- return boto.connect_ec2(aws_access_key_id=accessKey,
- aws_secret_access_key=secretKey,
- is_secure=useSSL,
- region=RegionInfo(None, 'eucalyptus', eucaHost),
- port=eucaPort,
- path=srvPath)
-
- def ListResources(api, creds, options):
- call_id = options.get('call_id')
- if Callids().already_handled(call_id): return ""
- # get slice's hrn from options
- xrn = options.get('geni_slice_urn', '')
- hrn, type = urn_to_hrn(xrn)
- logger = logging.getLogger('EucaAggregate')
-
- # get hrn of the original caller
- origin_hrn = options.get('origin_hrn', None)
- if not origin_hrn:
- origin_hrn = Credential(string=creds[0]).get_gid_caller().get_hrn()
-
- conn = self.getEucaConnection()
-
- if not conn:
- logger.error('Cannot create a connection to Eucalyptus')
- return 'Cannot create a connection to Eucalyptus'
-
- try:
- # Zones
- zones = conn.get_all_zones(['verbose'])
- p = ZoneResultParser(zones)
- clusters = p.parse()
- AggregateManagerEucalyptus.cloud['clusters'] = clusters
-
- # Images
- images = conn.get_all_images()
- AggregateManagerEucalyptus.cloud['images'] = images
- AggregateManagerEucalyptus.cloud['imageBundles'] = {}
- for i in images:
- if i.type != 'machine' or i.kernel_id is None: continue
- name = os.path.dirname(i.location)
- detail = {'imageID' : i.id, 'kernelID' : i.kernel_id, 'ramdiskID' : i.ramdisk_id}
- AggregateManagerEucalyptus.cloud['imageBundles'][name] = detail
-
- # Key Pairs
- keyPairs = conn.get_all_key_pairs()
- AggregateManagerEucalyptus.cloud['keypairs'] = keyPairs
-
- if hrn:
- instanceId = []
- instances = []
-
- # Get the instances that belong to the given slice from sqlite3
- # XXX use getOne() in production because the slice's hrn is supposed
- # to be unique. For testing, uniqueness is turned off in the db.
- # If the slice isn't found in the database, create a record for the
- # slice.
- matchedSlices = list(Slice.select(Slice.q.slice_hrn == hrn))
- if matchedSlices:
- theSlice = matchedSlices[-1]
- else:
- theSlice = Slice(slice_hrn = hrn)
- for instance in theSlice.instances:
- instanceId.append(instance.instance_id)
-
- # Get the information about those instances using their ids.
- if len(instanceId) > 0:
- reservations = conn.get_all_instances(instanceId)
- else:
- reservations = []
- for reservation in reservations:
- for instance in reservation.instances:
- instances.append(instance)
-
- # Construct a dictionary for the EucaRSpecBuilder
- instancesDict = {}
- for instance in instances:
- instList = instancesDict.setdefault(instance.instance_type, [])
- instInfoDict = {}
-
- instInfoDict['id'] = instance.id
- instInfoDict['public_dns'] = instance.public_dns_name
- instInfoDict['state'] = instance.state
- instInfoDict['key'] = instance.key_name
-
- instList.append(instInfoDict)
- AggregateManagerEucalyptus.cloud['instances'] = instancesDict
-
- except EC2ResponseError, ec2RespErr:
- errTree = ET.fromstring(ec2RespErr.body)
- errMsgE = errTree.find('.//Message')
- logger.error(errMsgE.text)
-
- rspec = EucaRSpecBuilder(AggregateManagerEucalyptus.cloud).toXML()
-
- # Remove the instances records so next time they won't
- # show up.
- if 'instances' in AggregateManagerEucalyptus.cloud:
- del AggregateManagerEucalyptus.cloud['instances']
-
- return rspec
-
- """
- Hook called via 'sfi.py create'
- """
- def CreateSliver(api, slice_xrn, creds, xml, users, options):
- call_id = options.get('call_id')
- if Callids().already_handled(call_id): return ""
-
- logger = logging.getLogger('EucaAggregate')
- logger.debug("In CreateSliver")
-
- aggregate = PlAggregate(self.driver)
- slices = PlSlices(self.driver)
- (hrn, type) = urn_to_hrn(slice_xrn)
- peer = slices.get_peer(hrn)
- sfa_peer = slices.get_sfa_peer(hrn)
- slice_record=None
- if users:
- slice_record = users[0].get('slice_record', {})
-
- conn = self.getEucaConnection()
- if not conn:
- logger.error('Cannot create a connection to Eucalyptus')
- return ""
-
- # Validate RSpec
- schemaXML = ET.parse(AggregateManagerEucalyptus.EUCALYPTUS_RSPEC_SCHEMA)
- rspecValidator = ET.RelaxNG(schemaXML)
- rspecXML = ET.XML(xml)
- for network in rspecXML.iterfind("./network"):
- if network.get('name') != AggregateManagerEucalyptus.cloud['name']:
- # Throw away everything except my own RSpec
- # sfa_logger().error("CreateSliver: deleting %s from rspec"%network.get('id'))
- network.getparent().remove(network)
- if not rspecValidator(rspecXML):
- error = rspecValidator.error_log.last_error
- message = '%s (line %s)' % (error.message, error.line)
- raise InvalidRSpec(message)
-
- """
- Create the sliver[s] (slice) at this aggregate.
- Verify HRN and initialize the slice record in PLC if necessary.
- """
-
- # ensure site record exists
- site = slices.verify_site(hrn, slice_record, peer, sfa_peer)
- # ensure slice record exists
- slice = slices.verify_slice(hrn, slice_record, peer, sfa_peer)
- # ensure person records exists
- persons = slices.verify_persons(hrn, slice, users, peer, sfa_peer)
-
- # Get the slice from db or create one.
- s = Slice.select(Slice.q.slice_hrn == hrn).getOne(None)
- if s is None:
- s = Slice(slice_hrn = hrn)
-
- # Process any changes in existing instance allocation
- pendingRmInst = []
- for sliceInst in s.instances:
- pendingRmInst.append(sliceInst.instance_id)
- existingInstGroup = rspecXML.findall(".//euca_instances")
- for instGroup in existingInstGroup:
- for existingInst in instGroup:
- if existingInst.get('id') in pendingRmInst:
- pendingRmInst.remove(existingInst.get('id'))
- for inst in pendingRmInst:
- dbInst = EucaInstance.select(EucaInstance.q.instance_id == inst).getOne(None)
- if dbInst.meta.state != 'deleted':
- logger.debug('Instance %s will be terminated' % inst)
- # Terminate instances one at a time for robustness
- conn.terminate_instances([inst])
- # Only change the state but do not remove the entry from the DB.
- dbInst.meta.state = 'deleted'
- #dbInst.destroySelf()
-
- # Process new instance requests
- requests = rspecXML.findall(".//request")
- if requests:
- # Get all the public keys associated with the slice.
- keys = []
- for user in users:
- keys += user['keys']
- logger.debug("Keys: %s" % user['keys'])
- pubKeys = '\n'.join(keys)
- logger.debug('Passing the following keys to the instance:\n%s' % pubKeys)
- for req in requests:
- vmTypeElement = req.getparent()
- instType = vmTypeElement.get('name')
- numInst = int(req.find('instances').text)
-
- bundleName = req.find('bundle').text
- if not AggregateManagerEucalyptus.cloud['imageBundles'][bundleName]:
- logger.error('Cannot find bundle %s' % bundleName)
- bundleInfo = AggregateManagerEucalyptus.cloud['imageBundles'][bundleName]
- instKernel = bundleInfo['kernelID']
- instDiskImg = bundleInfo['imageID']
- instRamDisk = bundleInfo['ramdiskID']
- instKey = None
-
- # Create the instances
- for i in range(0, numInst):
- eucaInst = EucaInstance(slice = s,
- kernel_id = instKernel,
- image_id = instDiskImg,
- ramdisk_id = instRamDisk,
- key_pair = instKey,
- inst_type = instType,
- meta = Meta(start_time=datetime.datetime.utcnow()))
- eucaInst.reserveInstance(conn, pubKeys)
-
- # xxx - should return altered rspec
- # with enough data for the client to understand what's happened
- return xml
-
- ##
- # Return information on the IP addresses bound to each slice's instances
- #
- def dumpInstanceInfo():
- logger = logging.getLogger('EucaMeta')
- outdir = "/var/www/html/euca/"
- outfile = outdir + "instances.txt"
-
- try:
- os.makedirs(outdir)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
-
- dbResults = Meta.select(
- AND(Meta.q.pri_addr != None,
- Meta.q.state == 'running')
- )
- dbResults = list(dbResults)
- f = open(outfile, "w")
- for r in dbResults:
- instId = r.instance.instance_id
- ipaddr = r.pri_addr
- hrn = r.instance.slice.slice_hrn
- logger.debug('[dumpInstanceInfo] %s %s %s' % (instId, ipaddr, hrn))
- f.write("%s %s %s\n" % (instId, ipaddr, hrn))
- f.close()
-
- def GetVersion(api, options):
-
- version_manager = VersionManager()
- ad_rspec_versions = []
- request_rspec_versions = []
- for rspec_version in version_manager.versions:
- if rspec_version.content_type in ['*', 'ad']:
- ad_rspec_versions.append(rspec_version.to_dict())
- if rspec_version.content_type in ['*', 'request']:
- request_rspec_versions.append(rspec_version.to_dict())
- xrn=Xrn(api.hrn)
- version_more = {'interface':'aggregate',
- 'sfa': 1,
- 'geni_api': '2',
- 'testbed':'myplc',
- 'hrn':xrn.get_hrn(),
- 'geni_request_rspec_versions': request_rspec_versions,
- 'geni_ad_rspec_versions': ad_rspec_versions,
- }
- return version_core(version_more)
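
The EucaRSpecBuilder methods removed above compose the RSpec through an XML builder object: "with xml.tag(attr=...)" opens an element and "xml << value" appends text to the innermost open element. The builder class itself is defined elsewhere in the removed module and is not shown here, so the toy sketch below only illustrates the idiom and is not the real implementation.

    # Toy sketch of the "with xml.tag(...)" / "xml << text" idiom used by the
    # removed EucaRSpecBuilder; the real builder is not part of this diff.
    class _Tag(object):
        def __init__(self, builder, name, attrs):
            self.builder, self.name, self.attrs = builder, name, attrs
        def __call__(self, **attrs):
            self.attrs.update(attrs)
            return self
        def __enter__(self):
            rendered = ''.join(' %s="%s"' % item for item in self.attrs.items())
            self.builder.parts.append('<%s%s>' % (self.name, rendered))
        def __exit__(self, *exc):
            self.builder.parts.append('</%s>' % self.name)

    class XMLSketch(object):
        def __init__(self):
            self.parts = []
        def __getattr__(self, name):
            return _Tag(self, name, {})
        def __lshift__(self, text):
            self.parts.append(str(text))
            return self
        def __str__(self):
            return ''.join(self.parts)

    xml = XMLSketch()
    with xml.keypairs:
        with xml.keypair:
            xml << 'my-key'
    print(xml)   # <keypairs><keypair>my-key</keypair></keypairs>
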
diff --git a/sfa/managers/component_manager_pl.py b/sfa/managers/component_manager_pl.py
index 8518839..ba9b7eb 100644
--- a/sfa/managers/component_manager_pl.py
+++ b/sfa/managers/component_manager_pl.py
@@ -1,7 +1,6 @@
-import xmlrpclib
-
from sfa.util.faults import SliverDoesNotExist
from sfa.util.version import version_core
+from sfa.util.py23 import xmlrpc_client
from sfa.trust.sfaticket import SfaTicket
@@ -66,7 +65,7 @@ def redeem_ticket(api, ticket_string):
raise SliverDoesNotExist(slicename)
# convert ticket to format nm is used to
- nm_ticket = xmlrpclib.dumps((ticket.attributes,), methodresponse=True)
+ nm_ticket = xmlrpc_client.dumps((ticket.attributes,), methodresponse=True)
api.driver.nodemanager.AdminTicket(nm_ticket)
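
Most of the hunks below repeat the same Python 2/3 compatibility moves: direct imports of xmlrpclib, StringIO and types.StringTypes are replaced by names re-exported from sfa.util.py23, "except Exception, e" becomes "except Exception as e", print statements become print() calls, and "raise Exc, arg" becomes "raise Exc(arg)". The contents of sfa/util/py23 are not part of this diff; the sketch below is only an assumption of what such a shim module typically looks like.

    # Hypothetical sfa/util/py23-style shim (assumption -- the real module is
    # not shown in this diff): one import site that works on Python 2 and 3.
    try:                                     # Python 3
        import xmlrpc.client as xmlrpc_client
        from io import StringIO
        StringType = str
    except ImportError:                      # Python 2
        import xmlrpclib as xmlrpc_client
        from StringIO import StringIO
        StringType = basestring              # common base of str and unicode

With such a shim, sfa/managers/component_manager_pl.py above only changes its import line; the dumps() call keeps the same signature under both interpreters.
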
diff --git a/sfa/managers/eucalyptus/euca_rspec_validator.py b/sfa/managers/eucalyptus/euca_rspec_validator.py
index 7e25063..7ebaae5 100755
--- a/sfa/managers/eucalyptus/euca_rspec_validator.py
+++ b/sfa/managers/eucalyptus/euca_rspec_validator.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
-from __future__ import with_statement
+from __future__ import print_function
+
import sys
import os
from lxml import etree as ET
@@ -19,9 +20,9 @@ def main():
if not rspecValidator(rspecXML):
error = rspecValidator.error_log.last_error
message = '%s (line %s)' % (error.message, error.line)
- print message
+ print(message)
else:
- print 'It is valid'
+ print('It is valid')
if __name__ == "__main__":
main()
diff --git a/sfa/managers/managerwrapper.py b/sfa/managers/managerwrapper.py
index 946f7d2..da8c98f 100644
--- a/sfa/managers/managerwrapper.py
+++ b/sfa/managers/managerwrapper.py
@@ -27,7 +27,7 @@ class ManagerWrapper:
# that's what happens when there's something wrong with the db
# or any bad stuff of that kind at startup time
logger.log_exc("Failed to create a manager, startup sequence is broken")
- raise SfaAPIError,"Argument to ManagerWrapper must be a module or class"
+ raise SfaAPIError("Argument to ManagerWrapper must be a module or class")
self.interface = interface
def __getattr__(self, method):
diff --git a/sfa/managers/registry_manager.py b/sfa/managers/registry_manager.py
index 1eafc10..78d933d 100644
--- a/sfa/managers/registry_manager.py
+++ b/sfa/managers/registry_manager.py
@@ -1,4 +1,5 @@
-import types
+from __future__ import print_function
+
# for get_key_from_incoming_ip
import tempfile
import os
@@ -161,7 +162,7 @@ class RegistryManager:
def Resolve(self, api, xrns, type=None, details=False):
dbsession = api.dbsession()
- if not isinstance(xrns, types.ListType):
+ if not isinstance(xrns, list):
# try to infer type if not set and we get a single input
if not type:
type = Xrn(xrns).get_type()
@@ -385,7 +386,7 @@ class RegistryManager:
pub_key=getattr(record,'reg-keys',None)
if pub_key is not None:
# use only first key in record
- if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
+ if pub_key and isinstance(pub_key, list): pub_key = pub_key[0]
pkey = convert_public_key(pub_key)
email = getattr(record,'email',None)
@@ -414,10 +415,12 @@ class RegistryManager:
elif isinstance (record, RegUser):
# create RegKey objects for incoming keys
if hasattr(record,'reg-keys'):
- keys=getattr(record,'reg-keys')
+ keys = getattr(record, 'reg-keys')
# some people send the key as a string instead of a list of strings
- if isinstance(keys,types.StringTypes): keys=[keys]
- logger.debug ("creating {} keys for user {}".format(len(keys), record.hrn))
+ # note for python2/3 : no need to consider unicode in a key
+ if isinstance(keys, str):
+ keys = [keys]
+ logger.debug("creating {} keys for user {}".format(len(keys), record.hrn))
record.reg_keys = [ RegKey (key) for key in keys ]
# update testbed-specific data if needed
@@ -458,7 +461,7 @@ class RegistryManager:
if type == 'user':
if getattr(new_record, 'keys', None):
new_key = new_record.keys
- if isinstance (new_key, types.ListType):
+ if isinstance (new_key, list):
new_key = new_key[0]
# take new_key into account
@@ -503,7 +506,7 @@ class RegistryManager:
# record.__dict__ as received by the driver seems to be off
# anyway the driver should receive an object
# (and then extract __dict__ itself if needed)
- print "DO NOT REMOVE ME before driver.update, record={}".format(record)
+ print("DO NOT REMOVE ME before driver.update, record={}".format(record))
# as of June 2015: I suspect we could remove that print line above and replace it with
# augment_with_sfa_builtins(record)
# instead, that checks for these fields, like it is done above in List()
@@ -623,7 +626,7 @@ class RegistryManager:
for command in all_commands:
(status, output) = commands.getstatusoutput(command)
if status:
- raise Exception, output
+ raise Exception(output)
for filename in [key_filename, gid_filename]:
os.unlink(filename)
diff --git a/sfa/managers/slice_manager.py b/sfa/managers/slice_manager.py
index 2a99b6f..8252ec3 100644
--- a/sfa/managers/slice_manager.py
+++ b/sfa/managers/slice_manager.py
@@ -1,7 +1,6 @@
import sys
import time
import traceback
-from StringIO import StringIO
from copy import copy
from lxml import etree
@@ -73,7 +72,7 @@ class SliceManager:
stats_elements = rspec.xml.xpath('//statistics')
for node in stats_elements:
node.getparent().remove(node)
- except Exception, e:
+ except Exception as e:
logger.warn("drop_slicemgr_stats failed: %s " % (str(e)))
def add_slicemgr_stat(self, rspec, callname, aggname, elapsed, status, exc_info=None):
@@ -99,7 +98,7 @@ class SliceManager:
exc_frame = exc_tag.add_element("tb_frame", filename=str(item[0]),
line=str(item[1]), func=str(item[2]), code=str(item[3]))
- except Exception, e:
+ except Exception as e:
logger.warn("add_slicemgr_stat failed on %s: %s" %(aggname, str(e)))
def ListResources(self, api, creds, options):
@@ -117,7 +116,7 @@ class SliceManager:
forward_options['geni_rspec_version'] = options.get('geni_rspec_version')
result = server.ListResources(credential, forward_options)
return {"aggregate": aggregate, "result": result, "elapsed": time.time()-tStart, "status": "success"}
- except Exception, e:
+ except Exception as e:
api.logger.log_exc("ListResources failed at %s" %(server.url))
return {"aggregate": aggregate, "elapsed": time.time()-tStart, "status": "exception", "exc_info": sys.exc_info()}
diff --git a/sfa/methods/Describe.py b/sfa/methods/Describe.py
index ae198c0..69d4fb7 100644
--- a/sfa/methods/Describe.py
+++ b/sfa/methods/Describe.py
@@ -55,7 +55,7 @@ class Describe(Method):
self.api.logger.debug("ListResources: sfatables on chain %s"%chain_name)
desc['geni_rspec'] = run_sfatables(chain_name, '', origin_hrn, desc['geni_rspec'])
- if options.has_key('geni_compressed') and options['geni_compressed'] == True:
+ if 'geni_compressed' in options and options['geni_compressed'] == True:
desc['geni_rspec'] = zlib.compress(desc['geni_rspec']).encode('base64')
return desc
diff --git a/sfa/methods/ListResources.py b/sfa/methods/ListResources.py
index 33777fd..795594b 100644
--- a/sfa/methods/ListResources.py
+++ b/sfa/methods/ListResources.py
@@ -50,7 +50,7 @@ class ListResources(Method):
self.api.logger.debug("ListResources: sfatables on chain %s"%chain_name)
filtered_rspec = run_sfatables(chain_name, '', origin_hrn, rspec)
- if options.has_key('geni_compressed') and options['geni_compressed'] == True:
+ if 'geni_compressed' in options and options['geni_compressed'] == True:
filtered_rspec = zlib.compress(filtered_rspec).encode('base64')
return filtered_rspec
diff --git a/sfa/methods/Resolve.py b/sfa/methods/Resolve.py
index dc34f75..fc12df1 100644
--- a/sfa/methods/Resolve.py
+++ b/sfa/methods/Resolve.py
@@ -1,5 +1,3 @@
-import types
-
from sfa.util.xrn import Xrn, urn_to_hrn
from sfa.util.method import Method
@@ -37,7 +35,7 @@ class Resolve(Method):
if 'details' in options: details=options['details']
else: details=False
type = None
- if not isinstance(xrns, types.ListType):
+ if not isinstance(xrns, list):
type = Xrn(xrns).get_type()
xrns=[xrns]
hrns = [urn_to_hrn(xrn)[0] for xrn in xrns]
diff --git a/sfa/nitos/nitosdriver.py b/sfa/nitos/nitosdriver.py
index 40db2a7..8e3da34 100644
--- a/sfa/nitos/nitosdriver.py
+++ b/sfa/nitos/nitosdriver.py
@@ -668,7 +668,7 @@ class NitosDriver (Driver):
# xxx this code is quite old and has not run for ages
# it is obviously totally broken and needs a rewrite
def get_ticket (self, slice_urn, slice_hrn, creds, rspec_string, options):
- raise SfaNotImplemented,"NitosDriver.get_ticket needs a rewrite"
+ raise SfaNotImplemented("NitosDriver.get_ticket needs a rewrite")
# please keep this code for future reference
# slices = PlSlices(self)
# peer = slices.get_peer(slice_hrn)
diff --git a/sfa/nitos/nitosshell.py b/sfa/nitos/nitosshell.py
index 269e367..cf543f2 100644
--- a/sfa/nitos/nitosshell.py
+++ b/sfa/nitos/nitosshell.py
@@ -1,9 +1,9 @@
import sys
-import xmlrpclib
import socket
from urlparse import urlparse
from sfa.util.sfalogging import logger
+from sfa.util.py23 import xmlrpc_client
class NitosShell:
"""
@@ -26,14 +26,14 @@ class NitosShell:
# use the 'capability' auth mechanism for higher performance when the PLC db is local
def __init__ ( self, config ) :
url = config.SFA_NITOS_URL
- self.proxy = xmlrpclib.Server(url, verbose = False, allow_none = True)
+ self.proxy = xmlrpc_client.ServerProxy(url, verbose = False, allow_none = True)
def __getattr__(self, name):
def func(*args, **kwds):
actual_name=None
if name in NitosShell.direct_calls: actual_name=name
if not actual_name:
- raise Exception, "Illegal method call %s for NITOS driver"%(name)
+ raise Exception("Illegal method call %s for NITOS driver"%(name))
actual_name = "scheduler.server." + actual_name
result=getattr(self.proxy, actual_name)(*args, **kwds)
logger.debug('NitosShell %s (%s) returned ... '%(name,actual_name))
diff --git a/sfa/nitos/nitosslices.py b/sfa/nitos/nitosslices.py
index 875a5a9..ffdb6e9 100644
--- a/sfa/nitos/nitosslices.py
+++ b/sfa/nitos/nitosslices.py
@@ -1,4 +1,3 @@
-from types import StringTypes
from collections import defaultdict
from sfa.util.sfatime import utcparse, datetime_to_epoch
diff --git a/sfa/nitos/nitosxrn.py b/sfa/nitos/nitosxrn.py
index 47d8aad..9722ffe 100644
--- a/sfa/nitos/nitosxrn.py
+++ b/sfa/nitos/nitosxrn.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
# specialized Xrn class for NITOS
import re
from sfa.util.xrn import Xrn
@@ -110,4 +112,4 @@ if __name__ == '__main__':
#slice_name = NitosXrn(xrn="omf.nitos.aminesl",type='slice').nitos_slicename()
slicename = "giorgos_n"
hrn = slicename_to_hrn("pla", "nitos", slicename)
- print hrn
+ print(hrn)
diff --git a/sfa/openstack/osaggregate.py b/sfa/openstack/osaggregate.py
index 2b65399..29681a0 100644
--- a/sfa/openstack/osaggregate.py
+++ b/sfa/openstack/osaggregate.py
@@ -395,7 +395,7 @@ class OSAggregate:
meta=metadata,
name=instance_name)
slivers.append(server)
- except Exception, err:
+ except Exception as err:
logger.log_exc(err)
return slivers
diff --git a/sfa/openstack/security_group.py b/sfa/openstack/security_group.py
index 70e191e..6aced8c 100644
--- a/sfa/openstack/security_group.py
+++ b/sfa/openstack/security_group.py
@@ -9,7 +9,7 @@ class SecurityGroup:
def create_security_group(self, name):
try:
self.client.security_groups.create(name=name, description=name)
- except Exception, ex:
+ except Exception as ex:
logger.log_exc("Failed to add security group")
raise
@@ -17,7 +17,7 @@ class SecurityGroup:
try:
security_group = self.client.security_groups.find(name=name)
self.client.security_groups.delete(security_group.id)
- except Exception, ex:
+ except Exception as ex:
logger.log_exc("Failed to delete security group")
@@ -58,7 +58,7 @@ class SecurityGroup:
group = self.client.security_groups.find(name=group_name)
self.client.security_group_rules.create(group.id, \
protocol, from_port, to_port,cidr_ip)
- except Exception, ex:
+ except Exception as ex:
logger.log_exc("Failed to add rule to group %s" % group_name)
@@ -81,6 +81,6 @@ class SecurityGroup:
rule = self.client.security_group_rules.find(**filter)
if rule:
self.client.security_group_rules.delete(rule)
- except Exception, ex:
+ except Exception as ex:
logger.log_exc("Failed to remove rule from group %s" % group_name)
diff --git a/sfa/openstack/shell.py b/sfa/openstack/shell.py
index acb9cff..e31be9d 100644
--- a/sfa/openstack/shell.py
+++ b/sfa/openstack/shell.py
@@ -1,5 +1,4 @@
import sys
-import xmlrpclib
import socket
import gettext
from urlparse import urlparse
diff --git a/sfa/planetlab/nodemanager.py b/sfa/planetlab/nodemanager.py
index 1671b9d..12e7f22 100644
--- a/sfa/planetlab/nodemanager.py
+++ b/sfa/planetlab/nodemanager.py
@@ -24,6 +24,8 @@ class NodeManager:
(fd, filename) = tempfile.mkstemp(dir=path)
scriptname = script_path + os.sep + filename.split(os.sep)[-1:][0]
# define the script to execute
+ # when providing support for python3 wrt xmlrpclib
+ # looks safer to keep things as-is
script = """
#!%(python)s
import xmlrpclib
diff --git a/sfa/planetlab/peers.py b/sfa/planetlab/peers.py
index 63c9f7f..7c6e1b7 100644
--- a/sfa/planetlab/peers.py
+++ b/sfa/planetlab/peers.py
@@ -1,5 +1,6 @@
from sfa.util.xrn import get_authority
-from types import StringTypes
+
+from sfa.util.py23 import StringType
def get_peer(pldriver, hrn):
# Because of myplc native federation, we first need to determine if this
@@ -15,7 +16,7 @@ def get_peer(pldriver, hrn):
# check if we are already peered with this site_authority, if so
peers = pldriver.shell.GetPeers( {}, ['peer_id', 'peername', 'shortname', 'hrn_root'])
for peer_record in peers:
- names = [name.lower() for name in peer_record.values() if isinstance(name, StringTypes)]
+ names = [name.lower() for name in peer_record.values() if isinstance(name, StringType)]
if site_authority in names:
peer = peer_record['shortname']
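
types.StringTypes (the (str, unicode) tuple) does not exist under Python 3, so string checks like the one in get_peer() go through a single StringType alias instead. A self-contained illustration of the same check, with StringType defined inline under the assumption that the alias means basestring on Python 2 and str on Python 3:

    import sys

    # Assumed equivalent of the StringType alias imported from sfa.util.py23:
    # basestring on Python 2, str on Python 3.
    if sys.version_info[0] >= 3:
        StringType = str
    else:
        StringType = basestring

    def lowercase_names(values):
        # keep only the string-valued entries, lower-cased, as get_peer() does
        return [v.lower() for v in values if isinstance(v, StringType)]

    print(lowercase_names(['UPMC', 42, u'Inria']))   # ['upmc', 'inria']
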
diff --git a/sfa/planetlab/plshell.py b/sfa/planetlab/plshell.py
index 16af128..15c58b6 100644
--- a/sfa/planetlab/plshell.py
+++ b/sfa/planetlab/plshell.py
@@ -89,7 +89,7 @@ class PlShell:
if name in PlShell.direct_calls: actual_name=name
if name in PlShell.alias_calls: actual_name=PlShell.alias_calls[name]
if not actual_name:
- raise Exception, "Illegal method call %s for PL driver"%(name)
+ raise Exception("Illegal method call %s for PL driver"%(name))
result=getattr(self.proxy, actual_name)(self.plauth, *args, **kwds)
logger.debug('PlShell %s (%s) returned ... '%(name,actual_name))
return result
diff --git a/sfa/planetlab/plslices.py b/sfa/planetlab/plslices.py
index ffa6f16..2b59a01 100644
--- a/sfa/planetlab/plslices.py
+++ b/sfa/planetlab/plslices.py
@@ -1,5 +1,4 @@
import time
-from types import StringTypes
from collections import defaultdict
from sfa.util.sfatime import utcparse, datetime_to_epoch
diff --git a/sfa/planetlab/plxrn.py b/sfa/planetlab/plxrn.py
index 1b0e474..70ff5e0 100644
--- a/sfa/planetlab/plxrn.py
+++ b/sfa/planetlab/plxrn.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
# specialized Xrn class for PlanetLab
import re
from sfa.util.xrn import Xrn, get_authority
@@ -27,14 +29,15 @@ def top_auth (hrn):
def hash_loginbase(site_hrn):
if len(site_hrn) <= 12:
- return site_hrn.replace('.','8')
+ return site_hrn.replace('.','8').replace('_', '8')
ratio = float(12) / len(site_hrn)
auths_tab = site_hrn.split('.')
auths_tab2 = []
for auth in auths_tab:
- auth2 = auth[:int(len(auth)*ratio)]
- auths_tab2.append(auth2)
+ auth = auth.replace('_', '8')
+ auth2 = auth[:int(len(auth)*ratio)]
+ auths_tab2.append(auth2)
return '8'.join(auths_tab2)
class PlXrn (Xrn):
@@ -102,3 +105,15 @@ class PlXrn (Xrn):
base = re.sub('[\\\\]*[^a-zA-Z0-9]', '', base)
return base
+
+tests = [
+ 'inria.foo.x',
+ 'in.foo.x_y',
+ 'inria.foo.longer',
+ 'onelab.upmc.fit_demo',
+ 'onelab.upmc.fit_demo.some_other',
+]
+
+if __name__ == '__main__':
+ for test in tests:
+ print(("{} - hash_loginbase -> {}".format(test, hash_loginbase(test))))
diff --git a/sfa/planetlab/topology.py b/sfa/planetlab/topology.py
index cd6036a..c05b198 100644
--- a/sfa/planetlab/topology.py
+++ b/sfa/planetlab/topology.py
@@ -25,6 +25,6 @@ class Topology(set):
tup = line.split()
if len(tup) > 1:
self.add((tup[0], tup[1]))
- except Exception, e:
+ except Exception as e:
logger.log_exc("Could not find or load the configuration file: %s" % config_file)
raise
diff --git a/sfa/planetlab/vlink.py b/sfa/planetlab/vlink.py
index 625963d..b0a8300 100644
--- a/sfa/planetlab/vlink.py
+++ b/sfa/planetlab/vlink.py
@@ -41,7 +41,7 @@ def get_tc_rate(s):
if m is None:
return -1
suffix = m.group(2).lower()
- if suffixes.has_key(suffix):
+ if suffix in suffixes:
return int(float(m.group(1)) * suffixes[suffix])
else:
return -1
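
dict.has_key() was removed in Python 3, so the suffix lookup in get_tc_rate() switches to the "in" operator. A stand-alone sketch of the same pattern; the suffix table and the regular expression here are simplified stand-ins, not the actual ones from sfa/planetlab/vlink.py:

    import re

    # Simplified stand-in for the suffix table defined elsewhere in vlink.py.
    suffixes = {'kbit': 1000, 'mbit': 1000 * 1000, 'gbit': 1000 * 1000 * 1000}

    def parse_rate(s):
        """Parse strings like '10mbit' into bits per second, or -1 on failure."""
        m = re.match(r'([0-9.]+)(\w*)', s)
        if m is None:
            return -1
        suffix = m.group(2).lower()
        if suffix in suffixes:           # Python 2/3 replacement for has_key()
            return int(float(m.group(1)) * suffixes[suffix])
        return -1

    print(parse_rate('10mbit'))          # 10000000
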
diff --git a/sfa/rspecs/elements/versions/iotlabv1Sliver.py b/sfa/rspecs/elements/versions/iotlabv1Sliver.py
index 0f9fb01..269de56 100644
--- a/sfa/rspecs/elements/versions/iotlabv1Sliver.py
+++ b/sfa/rspecs/elements/versions/iotlabv1Sliver.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
from sfa.rspecs.elements.element import Element
from sfa.rspecs.elements.sliver import Sliver
@@ -50,7 +52,7 @@ class Iotlabv1Sliver:
sliver['type'] = sliver_elem.attrib['name']
#sliver['images'] = Iotlabv1DiskImage.get_images(sliver_elem)
- print>>sys.stderr, "\r\n \r\n SLABV1SLIVER.PY \t\t\t get_slivers sliver %s " %( sliver)
+ print("\r\n \r\n SLABV1SLIVER.PY \t\t\t get_slivers sliver %s " %( sliver), file=sys.stderr)
slivers.append(sliver)
return slivers
diff --git a/sfa/rspecs/elements/versions/ofeliav1Port.py b/sfa/rspecs/elements/versions/ofeliav1Port.py
index f4cf74d..009d551 100644
--- a/sfa/rspecs/elements/versions/ofeliav1Port.py
+++ b/sfa/rspecs/elements/versions/ofeliav1Port.py
@@ -8,7 +8,7 @@ class Ofeliav1Port:
@staticmethod
def add_portrs(xml, ports):
- raise Exception, "not implemented yet"
+ raise Exception("not implemented yet")
if not ports:
return
if not isinstance(ports, list):
@@ -22,7 +22,7 @@ class Ofeliav1Port:
@staticmethod
def add_port_attribute(xml, name, value):
- raise Exception, "not implemented yet"
+ raise Exception("not implemented yet")
elem = xml.add_element(name)
elem.set_text(value)
diff --git a/sfa/rspecs/elements/versions/pgv2Node.py b/sfa/rspecs/elements/versions/pgv2Node.py
index 60447b0..a61749c 100644
--- a/sfa/rspecs/elements/versions/pgv2Node.py
+++ b/sfa/rspecs/elements/versions/pgv2Node.py
@@ -130,7 +130,7 @@ class PGv2Node:
node['slivers'] = PGv2SliverType.get_slivers(node_elem)
# get boot state
- available_elems = node_elem.xpath('./default:available | ./available')
+ available_elems = node_elem.xpath('./default:available | ./available')
if len(available_elems) > 0 and 'now' in available_elems[0].attrib:
if available_elems[0].attrib.get('now', '').lower() == 'true':
node['boot_state'] = 'boot'
diff --git a/sfa/rspecs/pg_rspec_converter.py b/sfa/rspecs/pg_rspec_converter.py
index b39184a..ef021c0 100755
--- a/sfa/rspecs/pg_rspec_converter.py
+++ b/sfa/rspecs/pg_rspec_converter.py
@@ -1,10 +1,13 @@
#!/usr/bin/python
+from __future__ import print_function
+
from lxml import etree
-from StringIO import StringIO
from sfa.util.xrn import Xrn, urn_to_hrn
from sfa.rspecs.rspec import RSpec
from sfa.rspecs.version_manager import VersionManager
+from sfa.util.py23 import StringIO
+
xslt='''<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="xml" indent="no"/>
@@ -91,4 +94,4 @@ class PGRSpecConverter:
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
- print PGRSpecConverter.to_sfa_rspec(sys.argv[1])
+ print(PGRSpecConverter.to_sfa_rspec(sys.argv[1]))
diff --git a/sfa/rspecs/rspec_converter.py b/sfa/rspecs/rspec_converter.py
index 7dff2f0..5d7bdd6 100755
--- a/sfa/rspecs/rspec_converter.py
+++ b/sfa/rspecs/rspec_converter.py
@@ -1,5 +1,7 @@
#!/usr/bin/python
+from __future__ import print_function
+
from sfa.rspecs.pg_rspec_converter import PGRSpecConverter
from sfa.rspecs.sfa_rspec_converter import SfaRSpecConverter
from sfa.rspecs.rspec import RSpec
@@ -39,8 +41,8 @@ if __name__ == '__main__':
pg_rspec = 'test/protogeni.rspec'
sfa_rspec = 'test/nodes.rspec'
- print "converting pg rspec to sfa rspec"
- print RSpecConverter.to_sfa_rspec(pg_rspec)
+ print("converting pg rspec to sfa rspec")
+ print(RSpecConverter.to_sfa_rspec(pg_rspec))
- print "converting sfa rspec to pg rspec"
- print RSpecConverter.to_pg_rspec(sfa_rspec)
+ print("converting sfa rspec to pg rspec")
+ print(RSpecConverter.to_pg_rspec(sfa_rspec))
diff --git a/sfa/rspecs/sfa_rspec_converter.py b/sfa/rspecs/sfa_rspec_converter.py
index 7bcc787..577b788 100755
--- a/sfa/rspecs/sfa_rspec_converter.py
+++ b/sfa/rspecs/sfa_rspec_converter.py
@@ -1,5 +1,7 @@
#!/usr/bin/python
+from __future__ import print_function
+
from sfa.util.xrn import hrn_to_urn
from sfa.rspecs.rspec import RSpec
from sfa.rspecs.version_manager import VersionManager
@@ -83,4 +85,4 @@ class SfaRSpecConverter:
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
- print SfaRSpecConverter.to_pg_rspec(sys.argv[1])
+ print(SfaRSpecConverter.to_pg_rspec(sys.argv[1]))
diff --git a/sfa/rspecs/version.py b/sfa/rspecs/version.py
index 0c74283..91c96e0 100644
--- a/sfa/rspecs/version.py
+++ b/sfa/rspecs/version.py
@@ -8,7 +8,7 @@ class RSpecVersion:
schema = None
namespace = None
extensions = {}
- namespaces = dict(extensions.items() + [('default', namespace)])
+ namespaces = dict(list(extensions.items()) + [('default', namespace)])
elements = []
enabled = False
@@ -21,7 +21,7 @@ class RSpecVersion:
'version': self.version,
'schema': self.schema,
'namespace': self.namespace,
- 'extensions': self.extensions.values()
+ 'extensions': list(self.extensions.values()),
}
def __str__(self):
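
Under Python 3, dict.items() returns a view object that cannot be concatenated to a list with "+", which is why every class-level "namespaces" computation in this patch gains an explicit list(...) conversion. A quick stand-alone illustration (the URLs are made up):

    extensions = {'flack': 'http://example.org/ext/flack/1'}   # made-up URL
    namespace = 'http://example.org/rspec/2'                   # made-up URL

    # extensions.items() + [...] only works on Python 2, where items() is a
    # list; the list(...) form below works on both interpreters.
    namespaces = dict(list(extensions.items()) + [('default', namespace)])
    print(namespaces)
    # -> {'flack': ..., 'default': ...} (key order may differ on Python 2)
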
diff --git a/sfa/rspecs/version_manager.py b/sfa/rspecs/version_manager.py
index 9efabea..e0a604e 100644
--- a/sfa/rspecs/version_manager.py
+++ b/sfa/rspecs/version_manager.py
@@ -1,7 +1,10 @@
+from __future__ import print_function
+
import os
from sfa.util.faults import InvalidRSpec, UnsupportedRSpecVersion
from sfa.rspecs.version import RSpecVersion
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringType
class VersionManager:
def __init__(self):
@@ -47,7 +50,7 @@ class VersionManager:
retval = None
if isinstance(version, dict):
retval = self._get_version(version.get('type'), version.get('version'), version.get('content_type'))
- elif isinstance(version, basestring):
+ elif isinstance(version, StringType):
version_parts = version.split(' ')
num_parts = len(version_parts)
type = version_parts[0]
@@ -78,19 +81,19 @@ class VersionManager:
def show_by_string(self, string):
try:
- print self.get_version(string)
+ print(self.get_version(string))
except Exception as e:
- print e
+ print(e)
def show_by_schema(self, string):
try:
- print self.get_version_by_schema(string)
+ print(self.get_version_by_schema(string))
except Exception as e:
- print e
+ print(e)
if __name__ == '__main__':
manager = VersionManager()
- print manager
+ print(manager)
manager.show_by_string('sfa 1')
manager.show_by_string('protogeni 2')
manager.show_by_string('protogeni 2 advertisement')
diff --git a/sfa/rspecs/versions/iotlabv1.py b/sfa/rspecs/versions/iotlabv1.py
index ad49157..0d39323 100644
--- a/sfa/rspecs/versions/iotlabv1.py
+++ b/sfa/rspecs/versions/iotlabv1.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
from copy import deepcopy
@@ -30,7 +32,7 @@ class Iotlabv1(RSpecVersion):
'flack': "http://www.protogeni.net/resources/rspec/ext/flack/1",
'planetlab': "http://www.planet-lab.org/resources/sfa/ext/planetlab/1",
}
- namespaces = dict(extensions.items() + [('default', namespace)])
+ namespaces = dict(list(extensions.items()) + [('default', namespace)])
elements = []
# Network
@@ -78,10 +80,10 @@ class Iotlabv1(RSpecVersion):
# Slivers
def get_sliver_attributes(self, hostname, node, network=None):
- print>>sys.stderr, "\r\n \r\n \r\n \t\t SLABV1.PY get_sliver_attributes hostname %s " %(hostname)
+ print("\r\n \r\n \r\n \t\t SLABV1.PY get_sliver_attributes hostname %s " %(hostname), file=sys.stderr)
nodes = self.get_nodes({'component_id': '*%s*' %hostname})
attribs = []
- print>>sys.stderr, "\r\n \r\n \r\n \t\t SLABV1.PY get_sliver_attributes-----------------nodes %s " %(nodes)
+ print("\r\n \r\n \r\n \t\t SLABV1.PY get_sliver_attributes-----------------nodes %s " %(nodes), file=sys.stderr)
if nodes is not None and isinstance(nodes, list) and len(nodes) > 0:
node = nodes[0]
#if node :
@@ -93,7 +95,7 @@ class Iotlabv1(RSpecVersion):
sliver = sliver[0]
attribs = sliver
#attribs = self.attributes_list(sliver)
- print>>sys.stderr, "\r\n \r\n \r\n \t\t SLABV1.PY get_sliver_attributes----------NN------- sliver %s self.namespaces %s attribs %s " %(sliver, self.namespaces,attribs)
+ print("\r\n \r\n \r\n \t\t SLABV1.PY get_sliver_attributes----------NN------- sliver %s self.namespaces %s attribs %s " %(sliver, self.namespaces,attribs), file=sys.stderr)
return attribs
def get_slice_attributes(self, network=None):
@@ -143,7 +145,7 @@ class Iotlabv1(RSpecVersion):
if attributes is None: attributes=[]
# all nodes should already be present in the rspec. Remove all
# nodes that don't have slivers
- print>>sys.stderr, "\r\n \r\n \r\n \t\t\t Iotlabv1.PY add_slivers ----->get_node "
+ print("\r\n \r\n \r\n \t\t\t Iotlabv1.PY add_slivers ----->get_node ", file=sys.stderr)
for hostname in hostnames:
node_elems = self.get_nodes({'component_id': '*%s*' % hostname})
if not node_elems:
@@ -163,7 +165,7 @@ class Iotlabv1(RSpecVersion):
continue
sliver = {'type': requested_sliver_type,
'pl_tags': attributes}
- print>>sys.stderr, "\r\n \r\n \r\n \t\t\t Iotlabv1.PY add_slivers node_elem %s sliver_type %s \r\n \r\n " %(node_elem, sliver_type)
+ print("\r\n \r\n \r\n \t\t\t Iotlabv1.PY add_slivers node_elem %s sliver_type %s \r\n \r\n " %(node_elem, sliver_type), file=sys.stderr)
# remove available element
for available_elem in node_elem.xpath('./default:available | ./available'):
node_elem.remove(available_elem)
@@ -303,4 +305,4 @@ if __name__ == '__main__':
r = RSpec('/tmp/iotlab.rspec')
r.load_rspec_elements(Iotlabv1.elements)
r.namespaces = Iotlabv1.namespaces
- print r.get(RSpecElements.NODE)
+ print(r.get(RSpecElements.NODE))
diff --git a/sfa/rspecs/versions/nitosv1.py b/sfa/rspecs/versions/nitosv1.py
index 3288b48..60caf1d 100644
--- a/sfa/rspecs/versions/nitosv1.py
+++ b/sfa/rspecs/versions/nitosv1.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
from copy import deepcopy
from lxml import etree
@@ -237,4 +239,4 @@ if __name__ == '__main__':
from sfa.rspecs.rspec_elements import *
r = RSpec('/tmp/resources.rspec')
r.load_rspec_elements(SFAv1.elements)
- print r.get(RSpecElements.NODE)
+ print(r.get(RSpecElements.NODE))
diff --git a/sfa/rspecs/versions/ofeliav1.py b/sfa/rspecs/versions/ofeliav1.py
index d074694..0a001cb 100755
--- a/sfa/rspecs/versions/ofeliav1.py
+++ b/sfa/rspecs/versions/ofeliav1.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from __future__ import print_function
+
from copy import deepcopy
from lxml import etree
@@ -22,13 +24,13 @@ class Ofelia(RSpecVersion):
schema = 'https://github.com/fp7-ofelia/ocf/blob/ocf.rspecs/openflow/schemas/ad.xsd'
namespace = 'openflow'
extensions = {}
- namespaces = dict(extensions.items() + [('default', namespace)])
+ namespaces = dict(list(extensions.items()) + [('default', namespace)])
#template = '<RSpec type="%s"></RSpec>' % type
template = '<rspec></rspec>'
# Network
def get_networks(self):
- raise Exception, "Not implemented"
+ raise Exception("Not implemented")
network_elems = self.xml.xpath('//network')
networks = [network_elem.get_instance(fields=['name', 'slice']) for \
network_elem in network_elems]
@@ -36,7 +38,7 @@ class Ofelia(RSpecVersion):
def add_network(self, network):
- raise Exception, "Not implemented"
+ raise Exception("Not implemented")
network_tags = self.xml.xpath('//network[@name="%s"]' % network)
if not network_tags:
network_tag = self.xml.add_element('network', name=network)
@@ -231,7 +233,7 @@ if __name__ == '__main__':
import pprint
from sfa.rspecs.rspec import RSpec
from sfa.rspecs.rspec_elements import *
- print "main ofeliav1"
+ print("main ofeliav1")
if len(sys.argv)!=2:
r = RSpec('/tmp/resources.rspec')
else:
diff --git a/sfa/rspecs/versions/pgv2.py b/sfa/rspecs/versions/pgv2.py
index 01a3155..3e25a37 100644
--- a/sfa/rspecs/versions/pgv2.py
+++ b/sfa/rspecs/versions/pgv2.py
@@ -1,5 +1,6 @@
+from __future__ import print_function
+
from copy import deepcopy
-from StringIO import StringIO
from sfa.util.xrn import Xrn
from sfa.rspecs.version import RSpecVersion
from sfa.rspecs.elements.versions.pgv2Link import PGv2Link
@@ -7,6 +8,7 @@ from sfa.rspecs.elements.versions.pgv2Node import PGv2Node
from sfa.rspecs.elements.versions.pgv2SliverType import PGv2SliverType
from sfa.rspecs.elements.versions.pgv2Lease import PGv2Lease
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringType
class PGv2(RSpecVersion):
type = 'ProtoGENI'
@@ -19,7 +21,7 @@ class PGv2(RSpecVersion):
'planetlab': "http://www.planet-lab.org/resources/sfa/ext/planetlab/1",
'plos': "http://www.planet-lab.org/resources/sfa/ext/plos/1",
}
- namespaces = dict(extensions.items() + [('default', namespace)])
+ namespaces = dict(list(extensions.items()) + [('default', namespace)])
# Networks
def get_networks(self):
@@ -203,7 +205,7 @@ class PGv2(RSpecVersion):
"""
from sfa.rspecs.rspec import RSpec
# just copy over all the child elements under the root element
- if isinstance(in_rspec, basestring):
+ if isinstance(in_rspec, StringType):
in_rspec = RSpec(in_rspec)
nodes = in_rspec.version.get_nodes()
@@ -213,7 +215,7 @@ class PGv2(RSpecVersion):
if not node['component_name']:
# this node element is part of a lease
continue
- if not node.has_key('sliver') or not node['sliver']:
+ if 'sliver' not in node or not node['sliver']:
node['sliver'] = {'name': 'plab-vserver'}
main_nodes.append(node)
self.add_nodes(main_nodes)
@@ -262,4 +264,4 @@ if __name__ == '__main__':
r = RSpec('/tmp/pg.rspec')
r.load_rspec_elements(PGv2.elements)
r.namespaces = PGv2.namespaces
- print r.get(RSpecElements.NODE)
+ print(r.get(RSpecElements.NODE))
diff --git a/sfa/rspecs/versions/pgv3.py b/sfa/rspecs/versions/pgv3.py
index 6d6a449..eb15b82 100644
--- a/sfa/rspecs/versions/pgv3.py
+++ b/sfa/rspecs/versions/pgv3.py
@@ -11,7 +11,7 @@ class GENIv3(PGv2):
'planetlab': "http://www.planet-lab.org/resources/sfa/ext/planetlab/1",
'plos': "http://www.planet-lab.org/resources/sfa/ext/plos/1",
}
- namespaces = dict(extensions.items() + [('default', namespace)])
+ namespaces = dict(list(extensions.items()) + [('default', namespace)])
elements = []
diff --git a/sfa/rspecs/versions/sfav1.py b/sfa/rspecs/versions/sfav1.py
index 6e973e7..645a660 100644
--- a/sfa/rspecs/versions/sfav1.py
+++ b/sfa/rspecs/versions/sfav1.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
from copy import deepcopy
from lxml import etree
@@ -239,4 +241,4 @@ if __name__ == '__main__':
from sfa.rspecs.rspec_elements import *
r = RSpec('/tmp/resources.rspec')
r.load_rspec_elements(SFAv1.elements)
- print r.get(RSpecElements.NODE)
+ print(r.get(RSpecElements.NODE))
diff --git a/sfa/server/modpython/SfaAggregateModPython.py b/sfa/server/modpython/SfaAggregateModPython.py
index 50f9f5f..5d38a55 100755
--- a/sfa/server/modpython/SfaAggregateModPython.py
+++ b/sfa/server/modpython/SfaAggregateModPython.py
@@ -50,7 +50,7 @@ def handler(req):
return apache.OK
- except Exception, err:
+ except Exception as err:
# Log error in /var/log/httpd/(ssl_)?error_log
logger.log_exc('%r'%err)
return apache.HTTP_INTERNAL_SERVER_ERROR
diff --git a/sfa/server/modpython/SfaRegistryModPython.py b/sfa/server/modpython/SfaRegistryModPython.py
index 31d0812..6d17cd2 100755
--- a/sfa/server/modpython/SfaRegistryModPython.py
+++ b/sfa/server/modpython/SfaRegistryModPython.py
@@ -50,7 +50,7 @@ def handler(req):
return apache.OK
- except Exception, err:
+ except Exception as err:
# Log error in /var/log/httpd/(ssl_)?error_log
logger.log_exc('%r'%err)
return apache.HTTP_INTERNAL_SERVER_ERROR
diff --git a/sfa/server/modpython/SfaSliceMgrModPython.py b/sfa/server/modpython/SfaSliceMgrModPython.py
index 61cb161..dcb8562 100755
--- a/sfa/server/modpython/SfaSliceMgrModPython.py
+++ b/sfa/server/modpython/SfaSliceMgrModPython.py
@@ -50,7 +50,7 @@ def handler(req):
return apache.OK
- except Exception, err:
+ except Exception as err:
# Log error in /var/log/httpd/(ssl_)?error_log
logger.log_exc('%r'%err)
return apache.HTTP_INTERNAL_SERVER_ERROR
diff --git a/sfa/server/sfa_component_setup.py b/sfa/server/sfa_component_setup.py
index f75f1ca..e35a40a 100755
--- a/sfa/server/sfa_component_setup.py
+++ b/sfa/server/sfa_component_setup.py
@@ -1,4 +1,6 @@
#!/usr/bin/python
+from __future__ import print_function
+
import sys
import os
import tempfile
@@ -24,7 +26,7 @@ def handle_gid_mismatch_exception(f):
try: return f(*args, **kwds)
except ConnectionKeyGIDMismatch:
# clean regen server keypair and try again
- print "cleaning keys and trying again"
+ print("cleaning keys and trying again")
clean_key_cred()
return f(args, kwds)
@@ -48,7 +50,7 @@ def server_proxy(url=None, port=None, keyfile=None, certfile=None,verbose=False)
url = "http://%(addr)s:%(port)s" % locals()
if verbose:
- print "Contacting registry at: %(url)s" % locals()
+ print("Contacting registry at: %(url)s" % locals())
server = SfaServerProxy(url, keyfile, certfile)
return server
@@ -125,11 +127,11 @@ def GetCredential(registry=None, force=False, verbose=False):
# check for existing credential
if not force and os.path.exists(credfile):
if verbose:
- print "Loading Credential from %(credfile)s " % locals()
+ print("Loading Credential from %(credfile)s " % locals())
cred = Credential(filename=credfile).save_to_string(save_parents=True)
else:
if verbose:
- print "Getting credential from registry"
+ print("Getting credential from registry")
# make sure node private key exists
node_pkey_file = config_dir + os.sep + "node.key"
node_gid_file = config_dir + os.sep + "node.gid"
@@ -177,7 +179,7 @@ def get_trusted_certs(registry=None, verbose=False):
registry = server_proxy(url=registry, keyfile=keyfile, certfile=certfile)
# get the trusted certs and save them in the right place
if verbose:
- print "Getting trusted certs from registry"
+ print("Getting trusted certs from registry")
trusted_certs = registry.get_trusted_certs(cred)
trusted_gid_names = []
for gid_str in trusted_certs:
@@ -187,7 +189,7 @@ def get_trusted_certs(registry=None, verbose=False):
trusted_gid_names.append(relative_filename)
gid_filename = trusted_certs_dir + os.sep + relative_filename
if verbose:
- print "Writing GID for %s as %s" % (gid.get_hrn(), gid_filename)
+ print("Writing GID for %s as %s" % (gid.get_hrn(), gid_filename))
gid.save_to_file(gid_filename, save_parents=True)
# remove old certs
@@ -195,7 +197,7 @@ def get_trusted_certs(registry=None, verbose=False):
for gid_name in all_gids_names:
if gid_name not in trusted_gid_names:
if verbose:
- print "Removing old gid ", gid_name
+ print("Removing old gid ", gid_name)
os.unlink(trusted_certs_dir + os.sep + gid_name)
@handle_gid_mismatch_exception
@@ -222,7 +224,7 @@ def get_gids(registry=None, verbose=False):
registry = server_proxy(url=registry, keyfile=keyfile, certfile=certfile)
if verbose:
- print "Getting current slices on this node"
+ print("Getting current slices on this node")
# get a list of slices on this node
from sfa.generic import Generic
generic=Generic.the_flavour()
@@ -247,7 +249,7 @@ def get_gids(registry=None, verbose=False):
return
if verbose:
- print "Getting gids for slices on this node from registry"
+ print("Getting gids for slices on this node from registry")
# get the gids
# and save them in the right place
records = registry.GetGids(hrns, cred)
@@ -265,12 +267,12 @@ def get_gids(registry=None, verbose=False):
gid = record['gid']
slice_gid_filename = os.sep.join([vserver_path, "etc", "slice.gid"])
if verbose:
- print "Saving GID for %(slicename)s as %(slice_gid_filename)s" % locals()
+ print("Saving GID for %(slicename)s as %(slice_gid_filename)s" % locals())
GID(string=gid).save_to_file(slice_gid_filename, save_parents=True)
# save the node gid in /etc/sfa
node_gid_filename = os.sep.join([vserver_path, "etc", "node.gid"])
if verbose:
- print "Saving node GID for %(slicename)s as %(node_gid_filename)s" % locals()
+ print("Saving node GID for %(slicename)s as %(node_gid_filename)s" % locals())
node_gid.save_to_file(node_gid_filename, save_parents=True)
@@ -279,15 +281,15 @@ def dispatch(options, args):
create_default_dirs()
if options.key:
if options.verbose:
- print "Getting the component's pkey"
+ print("Getting the component's pkey")
get_node_key(registry=options.registry, verbose=options.verbose)
if options.certs:
if options.verbose:
- print "Getting the component's trusted certs"
+ print("Getting the component's trusted certs")
get_trusted_certs(verbose=options.verbose)
if options.gids:
if options.verbose:
- print "Geting the component's GIDs"
+ print("Getting the component's GIDs")
get_gids(verbose=options.verbose)
def main():
diff --git a/sfa/server/sfaapi.py b/sfa/server/sfaapi.py
index 9911e46..499e3bd 100644
--- a/sfa/server/sfaapi.py
+++ b/sfa/server/sfaapi.py
@@ -19,7 +19,7 @@ from sfa.storage.alchemy import alchemy
####################
class SfaApi (XmlrpcApi):
"""
- An SfaApi instance is a basic xmlrcp service
+ An SfaApi instance is a basic xmlrpc service
augmented with the local cryptographic material and hrn
It also has the notion of its own interface (a string describing
diff --git a/sfa/server/threadedserver.py b/sfa/server/threadedserver.py
index 7dfac7d..7a26ad2 100644
--- a/sfa/server/threadedserver.py
+++ b/sfa/server/threadedserver.py
@@ -11,7 +11,6 @@ import socket
import traceback
import threading
from Queue import Queue
-import xmlrpclib
import SocketServer
import BaseHTTPServer
import SimpleXMLRPCServer
@@ -22,6 +21,7 @@ from sfa.util.config import Config
from sfa.util.cache import Cache
from sfa.trust.certificate import Certificate
from sfa.trust.trustedroots import TrustedRoots
+from sfa.util.py23 import xmlrpc_client
# don't hard code an api class anymore here
from sfa.generic import Generic
@@ -119,7 +119,7 @@ class SecureXMLRpcRequestHandler(SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
remote_addr = (remote_ip, remote_port) = self.connection.getpeername()
self.api.remote_addr = remote_addr
response = self.api.handle(remote_addr, request, self.server.method_map)
- except Exception, fault:
+ except Exception as fault:
# This should only happen if the module is buggy
# internal error, report as HTTP server error
logger.log_exc("server.do_POST")
@@ -195,7 +195,7 @@ class SecureXMLRPCServer(BaseHTTPServer.HTTPServer,SimpleXMLRPCServer.SimpleXMLR
# can't use format_exc() as it is not available in jython yet
# (even in trunk).
type, value, tb = sys.exc_info()
- raise xmlrpclib.Fault(1,''.join(traceback.format_exception(type, value, tb)))
+ raise xmlrpc_client.Fault(1,''.join(traceback.format_exception(type, value, tb)))
# override this one from the python 2.7 code
# originally defined in class TCPServer
diff --git a/sfa/server/xmlrpcapi.py b/sfa/server/xmlrpcapi.py
index b6f8e95..74e0026 100644
--- a/sfa/server/xmlrpcapi.py
+++ b/sfa/server/xmlrpcapi.py
@@ -3,7 +3,6 @@
#
import string
-import xmlrpclib
# SOAP support is optional
try:
@@ -19,6 +18,7 @@ except ImportError:
#from sfa.util.faults import SfaNotImplemented, SfaAPIError, SfaInvalidAPIMethod, SfaFault
from sfa.util.faults import SfaInvalidAPIMethod, SfaAPIError, SfaFault
from sfa.util.sfalogging import logger
+from sfa.util.py23 import xmlrpc_client
####################
# See "2.2 Characters" in the XML specification:
@@ -71,12 +71,14 @@ def xmlrpclib_dump(self, value, write):
if isinstance(value, Type):
f(*args)
return
- raise TypeError, "cannot marshal %s objects" % type(value)
+ raise TypeError("cannot marshal %s objects" % type(value))
else:
f(*args)
# You can't hide from me!
-xmlrpclib.Marshaller._Marshaller__dump = xmlrpclib_dump
+# Note: not quite sure if this will still cause
+# the expected behaviour under python3
+xmlrpc_client.Marshaller._Marshaller__dump = xmlrpclib_dump
class XmlrpcApi:
"""
@@ -102,7 +104,7 @@ class XmlrpcApi:
"""
# Look up method
if method not in self.methods:
- raise SfaInvalidAPIMethod, method
+ raise SfaInvalidAPIMethod(method)
# Get new instance of method
try:
@@ -112,7 +114,7 @@ class XmlrpcApi:
return getattr(module, classname)(self)
except (ImportError, AttributeError):
self.logger.log_exc("Error importing method: %s" % method)
- raise SfaInvalidAPIMethod, method
+ raise SfaInvalidAPIMethod(method)
def call(self, source, method, *args):
"""
@@ -131,14 +133,14 @@ class XmlrpcApi:
"""
# Parse request into method name and arguments
try:
- interface = xmlrpclib
- self.protocol = 'xmlrpclib'
- (args, method) = xmlrpclib.loads(data)
- if method_map.has_key(method):
+ interface = xmlrpc_client
+ self.protocol = 'xmlrpc'
+ (args, method) = xmlrpc_client.loads(data)
+ if method in method_map:
method = method_map[method]
methodresponse = True
- except Exception, e:
+ except Exception as e:
if SOAPpy is not None:
self.protocol = 'soap'
interface = SOAPpy
@@ -151,10 +153,10 @@ class XmlrpcApi:
try:
result = self.call(source, method, *args)
- except SfaFault, fault:
+ except SfaFault as fault:
result = fault
self.logger.log_exc("XmlrpcApi.handle has caught Exception")
- except Exception, fault:
+ except Exception as fault:
self.logger.log_exc("XmlrpcApi.handle has caught Exception")
result = SfaAPIError(fault)
@@ -168,10 +170,10 @@ class XmlrpcApi:
convert result to a valid xmlrpc or soap response
"""
- if self.protocol == 'xmlrpclib':
+ if self.protocol == 'xmlrpc':
if not isinstance(result, SfaFault):
result = (result,)
- response = xmlrpclib.dumps(result, methodresponse = True, encoding = self.encoding, allow_none = 1)
+ response = xmlrpc_client.dumps(result, methodresponse = True, encoding = self.encoding, allow_none = 1)
elif self.protocol == 'soap':
if isinstance(result, Exception):
result = faultParameter(NS.ENV_T + ":Server", "Method Failed", method)
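
The reassignment of Marshaller._Marshaller__dump above relies on Python name mangling: a method called __dump defined inside class Marshaller is stored under the attribute name _Marshaller__dump, so rebinding that attribute swaps in the custom dumper that internal calls resolve to. The added comment keeps the original author's doubt about Python 3; the toy classes below only illustrate the mangling mechanism and are not the real xmlrpc Marshaller:

    class Marshaller(object):
        def __dump(self, value):          # stored as _Marshaller__dump
            return 'original: %r' % value

        def dump(self, value):
            return self.__dump(value)     # compiled to self._Marshaller__dump(value)

    def patched_dump(self, value):
        return 'patched: %r' % value

    # Same trick as the patch: override the "private" method via its mangled name.
    Marshaller._Marshaller__dump = patched_dump
    print(Marshaller().dump(42))          # patched: 42
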
diff --git a/sfa/storage/alchemy.py b/sfa/storage/alchemy.py
index f052fba..64c39cf 100644
--- a/sfa/storage/alchemy.py
+++ b/sfa/storage/alchemy.py
@@ -1,5 +1,3 @@
-from types import StringTypes
-
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
@@ -39,7 +37,7 @@ class Alchemy:
except:
pass
self.engine=None
- raise Exception,"Could not connect to database %s as %s with psycopg2"%(dbname,config.SFA_DB_USER)
+ raise Exception("Could not connect to database %s as %s with psycopg2"%(dbname,config.SFA_DB_USER))
# expects boolean True: debug is ON or False: debug is OFF
diff --git a/sfa/storage/dbschema.py b/sfa/storage/dbschema.py
index 4c99fb2..5166b93 100644
--- a/sfa/storage/dbschema.py
+++ b/sfa/storage/dbschema.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import sys
import traceback
@@ -62,10 +64,10 @@ class DBSchema:
def drop_table (self, tablename):
if self.table_exists (tablename):
- print >>sys.stderr, "%s: Dropping table %s"%(DBSchema.header,tablename)
+ print("%s: Dropping table %s"%(DBSchema.header,tablename), file=sys.stderr)
self.engine.execute ("drop table %s cascade"%tablename)
else:
- print >>sys.stderr, "%s: no need to drop table %s"%(DBSchema.header,tablename)
+ print("%s: no need to drop table %s"%(DBSchema.header,tablename), file=sys.stderr)
def handle_old_releases (self):
try:
@@ -75,7 +77,7 @@ class DBSchema:
# (.) we've just created the db, so it's either a fresh install, or
# (.) we come from a 'very old' depl.
# in either case, an import is required but there's nothing to clean up
- print >> sys.stderr,"%s: make sure to run import"%(DBSchema.header,)
+ print("%s: make sure to run import"%(DBSchema.header,), file=sys.stderr)
elif self.table_exists ('sfa_db_version'):
# we come from an 'old' version
self.drop_table ('records')
@@ -85,7 +87,7 @@ class DBSchema:
# we should be good here
pass
except:
- print >> sys.stderr, "%s: unknown exception"%(DBSchema.header,)
+ print("%s: unknown exception"%(DBSchema.header,), file=sys.stderr)
traceback.print_exc ()
# after this call the db schema and the version as known by migrate should
diff --git a/sfa/storage/model.py b/sfa/storage/model.py
index 051ba87..923576e 100644
--- a/sfa/storage/model.py
+++ b/sfa/storage/model.py
@@ -1,4 +1,3 @@
-from types import StringTypes
from datetime import datetime
from sqlalchemy import or_, and_
@@ -14,6 +13,7 @@ from sfa.storage.record import Record
from sfa.util.sfalogging import logger
from sfa.util.sfatime import utcparse, datetime_to_string
from sfa.util.xml import XML
+from sfa.util.py23 import StringType
from sfa.trust.gid import GID
@@ -99,7 +99,7 @@ class RegRecord(Base, AlchemyObj):
if type: self.type=type
if hrn: self.hrn=hrn
if gid:
- if isinstance(gid, StringTypes): self.gid=gid
+ if isinstance(gid, StringType): self.gid=gid
else: self.gid=gid.save_to_string(save_parents=True)
if authority: self.authority=authority
if peer_authority: self.peer_authority=peer_authority
@@ -130,7 +130,7 @@ class RegRecord(Base, AlchemyObj):
@validates ('gid')
def validate_gid (self, key, gid):
if gid is None: return
- elif isinstance(gid, StringTypes): return gid
+ elif isinstance(gid, StringType): return gid
else: return gid.save_to_string(save_parents=True)
def validate_datetime (self, key, incoming):
diff --git a/sfa/storage/parameter.py b/sfa/storage/parameter.py
index e520bfc..dc9d5b5 100644
--- a/sfa/storage/parameter.py
+++ b/sfa/storage/parameter.py
@@ -5,9 +5,11 @@
# Copyright (C) 2006 The Trustees of Princeton University
#
-from types import NoneType, IntType, LongType, FloatType, StringTypes, DictType, TupleType, ListType
+from types import NoneType, IntType, LongType, FloatType, DictType, TupleType, ListType
from sfa.util.faults import SfaAPIError
+from sfa.util.py23 import StringType
+
class Parameter:
"""
Typed value wrapper. Use in accepts and returns to document method
@@ -89,7 +91,7 @@ def xmlrpc_type(arg):
return "boolean"
elif arg_type == FloatType:
return "double"
- elif arg_type in StringTypes:
+ elif issubclass(arg_type, StringType):
return "string"
elif arg_type == ListType or arg_type == TupleType:
return "array"
@@ -100,4 +102,4 @@ def xmlrpc_type(arg):
# documentation purposes.
return "mixed"
else:
- raise SfaAPIError, "XML-RPC cannot marshal %s objects" % arg_type
+ raise SfaAPIError("XML-RPC cannot marshal %s objects" % arg_type)
diff --git a/sfa/storage/record.py b/sfa/storage/record.py
index 8600723..a03ce30 100644
--- a/sfa/storage/record.py
+++ b/sfa/storage/record.py
@@ -1,10 +1,12 @@
+from __future__ import print_function
+
from sfa.util.sfatime import utcparse, datetime_to_string
-from types import StringTypes
from datetime import datetime
from sfa.util.xml import XML
from sfa.trust.gid import GID
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringType
class Record:
@@ -54,9 +56,9 @@ class Record:
def load_from_dict (self, d):
for (k,v) in d.iteritems():
# experimental
- if isinstance(v, StringTypes) and v.lower() in ['true']:
+ if isinstance(v, StringType) and v.lower() in ['true']:
v = True
- if isinstance(v, StringTypes) and v.lower() in ['false']:
+ if isinstance(v, StringType) and v.lower() in ['false']:
v = False
setattr(self, k, v)
@@ -82,15 +84,15 @@ class Record:
if format == 'text':
self.dump_text(dump_parents,sort=sort)
elif format == 'xml':
- print self.save_as_xml()
+ print(self.save_as_xml())
elif format == 'simple':
- print self.dump_simple()
+ print(self.dump_simple())
else:
- raise Exception, "Invalid format %s" % format
+ raise Exception("Invalid format %s" % format)
def dump_text(self, dump_parents=False, sort=False):
- print 40*'='
- print "RECORD"
+ print(40*'=')
+ print("RECORD")
# print remaining fields
fields = self.fields()
if sort: fields.sort()
@@ -102,12 +104,12 @@ class Record:
if callable (attrib): continue
# handle gid
if attrib_name == 'gid':
- print " gid:"
- print GID(string=attrib).dump_string(8, dump_parents)
+ print(" gid:")
+ print(GID(string=attrib).dump_string(8, dump_parents))
elif attrib_name in ['date created', 'last updated']:
- print " %s: %s" % (attrib_name, self.date_repr(attrib_name))
+ print(" %s: %s" % (attrib_name, self.date_repr(attrib_name)))
else:
- print " %s: %s" % (attrib_name, attrib)
+ print(" %s: %s" % (attrib_name, attrib))
def dump_simple(self):
return "%s"%self
diff --git a/sfa/trust/abac_credential.py b/sfa/trust/abac_credential.py
index f454d18..cb6e686 100644
--- a/sfa/trust/abac_credential.py
+++ b/sfa/trust/abac_credential.py
@@ -21,13 +21,16 @@
# IN THE WORK.
#----------------------------------------------------------------------
+from __future__ import print_function
+
from sfa.trust.credential import Credential, append_sub, DEFAULT_CREDENTIAL_LIFETIME
from sfa.util.sfalogging import logger
from sfa.util.sfatime import SFATIME_FORMAT
-from StringIO import StringIO
from xml.dom.minidom import Document, parseString
+from sfa.util.py23 import StringIO
+
HAVELXML = False
try:
from lxml import etree
@@ -179,7 +182,7 @@ class ABACCredential(Credential):
result += "\nEnd XML\n"
except:
import traceback
- print "exc. Credential.dump_string / XML"
+ print("exc. Credential.dump_string / XML")
traceback.print_exc()
return result
diff --git a/sfa/trust/auth.py b/sfa/trust/auth.py
index 86d5d4f..512c58b 100644
--- a/sfa/trust/auth.py
+++ b/sfa/trust/auth.py
@@ -2,13 +2,13 @@
# SfaAPI authentication
#
import sys
-from types import StringTypes
from sfa.util.faults import InsufficientRights, MissingCallerGID, \
MissingTrustedRoots, PermissionError, BadRequestHash, \
ConnectionKeyGIDMismatch, SfaPermissionDenied, CredentialNotVerifiable, \
Forbidden, BadArgs
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringType
from sfa.util.config import Config
from sfa.util.xrn import Xrn, get_authority
@@ -62,7 +62,7 @@ class Auth:
if xrns is None: xrns = []
error = (None, None)
def log_invalid_cred(cred):
- if not isinstance (cred, StringTypes):
+ if not isinstance (cred, StringType):
logger.info("cannot validate credential %s - expecting a string"%cred)
error = ('TypeMismatch',
"checkCredentials: expected a string, received {} -- {}"
diff --git a/sfa/trust/certificate.py b/sfa/trust/certificate.py
index ff6534e..a30d73a 100644
--- a/sfa/trust/certificate.py
+++ b/sfa/trust/certificate.py
@@ -35,15 +35,19 @@
##
#
+from __future__ import print_function
+
import functools
import os
import tempfile
import base64
from tempfile import mkstemp
-from OpenSSL import crypto
-import M2Crypto
-from M2Crypto import X509
+import OpenSSL
+# M2Crypto is imported on the fly to minimize crashes
+#import M2Crypto
+
+from sfa.util.py23 import PY3
from sfa.util.faults import CertExpired, CertMissingParent, CertNotSignedByParent
from sfa.util.sfalogging import logger
@@ -80,7 +84,7 @@ def set_passphrase(passphrase):
def test_passphrase(string, passphrase):
try:
- crypto.load_privatekey(crypto.FILETYPE_PEM, string, (lambda x: passphrase))
+ OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, string, (lambda x: passphrase))
return True
except:
return False
@@ -88,11 +92,11 @@ def test_passphrase(string, passphrase):
def convert_public_key(key):
keyconvert_path = "/usr/bin/keyconvert.py"
if not os.path.isfile(keyconvert_path):
- raise IOError, "Could not find keyconvert in %s" % keyconvert_path
+ raise IOError("Could not find keyconvert in {}".format(keyconvert_path))
# we can only convert rsa keys
if "ssh-dss" in key:
- raise Exception, "keyconvert: dss keys are not supported"
+ raise Exception("keyconvert: dss keys are not supported")
(ssh_f, ssh_fn) = tempfile.mkstemp()
ssl_fn = tempfile.mktemp()
@@ -106,7 +110,7 @@ def convert_public_key(key):
# that it can be expected to see why it failed.
# TODO: for production, cleanup the temporary files
if not os.path.exists(ssl_fn):
- raise Exception, "keyconvert: generated certificate not found. keyconvert may have failed."
+ raise Exception("keyconvert: generated certificate not found. keyconvert may have failed.")
k = Keypair()
try:
@@ -135,8 +139,8 @@ class Keypair:
# Creates a Keypair object
# @param create If create==True, creates a new public/private key and
# stores it in the object
- # @param string If string!=None, load the keypair from the string (PEM)
- # @param filename If filename!=None, load the keypair from the file
+ # @param string If string != None, load the keypair from the string (PEM)
+ # @param filename If filename != None, load the keypair from the file
def __init__(self, create=False, string=None, filename=None):
if create:
@@ -150,8 +154,8 @@ class Keypair:
# Create a RSA public/private key pair and store it inside the keypair object
def create(self):
- self.key = crypto.PKey()
- self.key.generate_key(crypto.TYPE_RSA, 2048)
+ self.key = OpenSSL.crypto.PKey()
+ self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
##
# Save the private key to a file
@@ -159,13 +163,13 @@ class Keypair:
def save_to_file(self, filename):
open(filename, 'w').write(self.as_pem())
- self.filename=filename
+ self.filename = filename
##
# Load the private key from a file. Implicity the private key includes the public key.
def load_from_file(self, filename):
- self.filename=filename
+ self.filename = filename
buffer = open(filename, 'r').read()
self.load_from_string(buffer)
@@ -173,19 +177,21 @@ class Keypair:
# Load the private key from a string. Implicitly the private key includes the public key.
def load_from_string(self, string):
+ import M2Crypto
if glo_passphrase_callback:
- self.key = crypto.load_privatekey(
- crypto.FILETYPE_PEM, string, functools.partial(glo_passphrase_callback, self, string))
+ self.key = OpenSSL.crypto.load_privatekey(
+ OpenSSL.crypto.FILETYPE_PEM, string, functools.partial(glo_passphrase_callback, self, string))
self.m2key = M2Crypto.EVP.load_key_string(
string, functools.partial(glo_passphrase_callback, self, string))
else:
- self.key = crypto.load_privatekey(crypto.FILETYPE_PEM, string)
+ self.key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, string)
self.m2key = M2Crypto.EVP.load_key_string(string)
##
# Load the public key from a string. No private key is loaded.
def load_pubkey_from_file(self, filename):
+ import M2Crypto
# load the m2 public key
m2rsakey = M2Crypto.RSA.load_pub_key(filename)
self.m2key = M2Crypto.EVP.PKey()
@@ -211,11 +217,11 @@ class Keypair:
# convert the m2 x509 cert to a pyopenssl x509
m2pem = m2x509.as_pem()
- pyx509 = crypto.load_certificate(crypto.FILETYPE_PEM, m2pem)
+ pyx509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, m2pem)
# get the pyopenssl pkey from the pyopenssl x509
self.key = pyx509.get_pubkey()
- self.filename=filename
+ self.filename = filename
##
# Load the public key from a string. No private key is loaded.
@@ -231,12 +237,13 @@ class Keypair:
# Return the private key in PEM format.
def as_pem(self):
- return crypto.dump_privatekey(crypto.FILETYPE_PEM, self.key)
+ return OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, self.key)
##
# Return an M2Crypto key object
def get_m2_pubkey(self):
+ import M2Crypto
if not self.m2key:
self.m2key = M2Crypto.EVP.load_key_string(self.as_pem())
return self.m2key
@@ -267,6 +274,7 @@ class Keypair:
return base64.b64encode(k.sign_final())
def verify_string(self, data, sig):
+ import M2Crypto
k = self.get_m2_pubkey()
k.verify_init()
k.verify_update(data)
@@ -279,14 +287,14 @@ class Keypair:
def get_filename(self):
return getattr(self,'filename',None)
- def dump (self, *args, **kwargs):
- print self.dump_string(*args, **kwargs)
+ def dump(self, *args, **kwargs):
+ print(self.dump_string(*args, **kwargs))
- def dump_string (self):
- result=""
- result += "KEYPAIR: pubkey=%40s..."%self.get_pubkey_string()
- filename=self.get_filename()
- if filename: result += "Filename %s\n"%filename
+ def dump_string(self):
+ result = ""
+ result += "KEYPAIR: pubkey={:>40}...".format(self.get_pubkey_string())
+ filename = self.get_filename()
+ if filename: result += "Filename {}\n".format(filename)
return result
##
@@ -310,7 +318,7 @@ class Certificate:
# parent = None
isCA = None # will be a boolean once set
- separator="-----parent-----"
+ separator = "-----parent-----"
##
# Create a certificate object.
@@ -347,7 +355,7 @@ class Certificate:
# Create a blank X509 certificate and store it in this object.
def create(self, lifeDays=1825):
- self.x509 = crypto.X509()
+ self.x509 = OpenSSL.crypto.X509()
# FIXME: Use different serial #s
self.x509.set_serial_number(3)
self.x509.gmtime_adj_notBefore(0) # 0 means now
@@ -377,7 +385,8 @@ class Certificate:
# If it's not in proper PEM format, wrap it
if string.count('-----BEGIN CERTIFICATE') == 0:
- string = '-----BEGIN CERTIFICATE-----\n%s\n-----END CERTIFICATE-----' % string
+ string = '-----BEGIN CERTIFICATE-----\n{}\n-----END CERTIFICATE-----'\
+ .format(string)
# If there is a PEM cert in there, but there is some other text first
# such as the text of the certificate, skip the text
@@ -395,10 +404,10 @@ class Certificate:
else:
parts = string.split(Certificate.separator, 1)
- self.x509 = crypto.load_certificate(crypto.FILETYPE_PEM, parts[0])
+ self.x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, parts[0])
if self.x509 is None:
- logger.warn("Loaded from string but cert is None: %s" % string)
+ logger.warn("Loaded from string but cert is None: {}".format(string))
# if there are more certs, then create a parent and let the parent load
# itself from the remainder of the string
@@ -413,7 +422,7 @@ class Certificate:
file = open(filename)
string = file.read()
self.load_from_string(string)
- self.filename=filename
+ self.filename = filename
##
# Save the certificate to a string.
@@ -424,7 +433,9 @@ class Certificate:
if self.x509 is None:
logger.warn("None cert in certificate.save_to_string")
return ""
- string = crypto.dump_certificate(crypto.FILETYPE_PEM, self.x509)
+ string = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, self.x509)
+ if PY3 and isinstance(string, bytes):
+ string = string.decode()
if save_parents and self.parent:
string = string + self.parent.save_to_string(save_parents)
return string
@@ -439,9 +450,11 @@ class Certificate:
f = filep
else:
f = open(filename, 'w')
+ if PY3 and isinstance(string, bytes):
+ string = string.decode()
f.write(string)
f.close()
- self.filename=filename
+ self.filename = filename
##
# Save the certificate to a random file in /tmp/
@@ -464,9 +477,9 @@ class Certificate:
# it's a mistake to use subject and cert params at the same time
assert(not cert)
if isinstance(subject, dict) or isinstance(subject, str):
- req = crypto.X509Req()
+ req = OpenSSL.crypto.X509Req()
reqSubject = req.get_subject()
- if (isinstance(subject, dict)):
+ if isinstance(subject, dict):
for key in reqSubject.keys():
setattr(reqSubject, key, subject[key])
else:
@@ -491,9 +504,9 @@ class Certificate:
# Set the subject name of the certificate
def set_subject(self, name):
- req = crypto.X509Req()
+ req = OpenSSL.crypto.X509Req()
subj = req.get_subject()
- if (isinstance(name, dict)):
+ if isinstance(name, dict):
for key in name.keys():
setattr(subj, key, name[key])
else:
@@ -530,7 +543,7 @@ class Certificate:
counter = 0
filtered = [self.filter_chunk(chunk) for chunk in data.split()]
message += " ".join( [f for f in filtered if f])
- omitted = len ([f for f in filtered if not f])
+ omitted = len([f for f in filtered if not f])
if omitted:
message += "..+{} omitted".format(omitted)
message += "]"
@@ -550,7 +563,8 @@ class Certificate:
# It is returned in the form of a Keypair object.
def get_pubkey(self):
- m2x509 = X509.load_cert_string(self.save_to_string())
+ import M2Crypto
+ m2x509 = M2Crypto.X509.load_cert_string(self.save_to_string())
pkey = Keypair()
pkey.key = self.x509.get_pubkey()
pkey.m2key = m2x509.get_pubkey()
@@ -568,7 +582,8 @@ class Certificate:
if self.isCA != None:
# Can't double set properties
- raise Exception, "Cannot set basicConstraints CA:?? more than once. Was %s, trying to set as %s" % (self.isCA, val)
+ raise Exception("Cannot set basicConstraints CA:?? more than once. "
+ "Was {}, trying to set as {}%s".format(self.isCA, val))
self.isCA = val
if val:
@@ -587,6 +602,7 @@ class Certificate:
# @param value string containing value of the extension
def add_extension(self, name, critical, value):
+ import M2Crypto
oldExtVal = None
try:
oldExtVal = self.get_extension(name)
@@ -604,9 +620,9 @@ class Certificate:
# FIXME: What if they are trying to set with a different value?
# Is this ever OK? Or should we raise an exception?
# elif oldExtVal:
-# raise "Cannot add extension %s which had val %s with new val %s" % (name, oldExtVal, value)
+# raise "Cannot add extension {} which had val {} with new val {}".format(name, oldExtVal, value)
- ext = crypto.X509Extension (name, critical, value)
+ ext = OpenSSL.crypto.X509Extension(name, critical, value)
self.x509.add_extensions([ext])
##
@@ -614,6 +630,7 @@ class Certificate:
def get_extension(self, name):
+ import M2Crypto
if name is None:
return None
@@ -621,7 +638,7 @@ class Certificate:
if certstr is None or certstr == "":
return None
# pyOpenSSL does not have a way to get extensions
- m2x509 = X509.load_cert_string(certstr)
+ m2x509 = M2Crypto.X509.load_cert_string(certstr)
if m2x509 is None:
logger.warn("No cert loaded in get_extension")
return None
@@ -639,8 +656,8 @@ class Certificate:
def set_data(self, str, field='subjectAltName'):
# pyOpenSSL only allows us to add extensions, so if we try to set the
# same extension more than once, it will not work
- if self.data.has_key(field):
- raise "Cannot set ", field, " more than once"
+ if field in self.data:
+ raise Exception("Cannot set {} more than once".format(field))
self.data[field] = str
self.add_extension(field, 0, str)
@@ -648,7 +665,7 @@ class Certificate:
# Return the data string that was previously set with set_data
def get_data(self, field='subjectAltName'):
- if self.data.has_key(field):
+ if field in self.data:
return self.data[field]
try:
@@ -676,8 +693,9 @@ class Certificate:
# did not sign the certificate, then an exception will be thrown.
def verify(self, pubkey):
+ import M2Crypto
# pyOpenSSL does not have a way to verify signatures
- m2x509 = X509.load_cert_string(self.save_to_string())
+ m2x509 = M2Crypto.X509.load_cert_string(self.save_to_string())
m2pubkey = pubkey.get_m2_pubkey()
# verify it
# verify returns -1 or 0 on failure depending on how serious the
@@ -752,7 +770,8 @@ class Certificate:
# verify expiration time
if self.x509.has_expired():
if debug_verify_chain:
- logger.debug("verify_chain: NO, Certificate %s has expired" % self.pretty_cert())
+ logger.debug("verify_chain: NO, Certificate {} has expired"
+ .format(self.pretty_cert()))
raise CertExpired(self.pretty_cert(), "client cert")
# if this cert is signed by a trusted_cert, then we are set
@@ -761,35 +780,38 @@ class Certificate:
# verify expiration of trusted_cert ?
if not trusted_cert.x509.has_expired():
if debug_verify_chain:
- logger.debug("verify_chain: YES. Cert %s signed by trusted cert %s"%(
- self.pretty_cert(), trusted_cert.pretty_cert()))
+ logger.debug("verify_chain: YES. Cert {} signed by trusted cert {}"
+ .format(self.pretty_cert(), trusted_cert.pretty_cert()))
return trusted_cert
else:
if debug_verify_chain:
- logger.debug("verify_chain: NO. Cert %s is signed by trusted_cert %s, but that signer is expired..."%(
- self.pretty_cert(),trusted_cert.pretty_cert()))
- raise CertExpired(self.pretty_cert()," signer trusted_cert %s"%trusted_cert.pretty_cert())
+ logger.debug("verify_chain: NO. Cert {} is signed by trusted_cert {}, "
+ "but that signer is expired..."
+ .format(self.pretty_cert(),trusted_cert.pretty_cert()))
+ raise CertExpired("{} signer trusted_cert {}"
+ .format(self.pretty_cert(), trusted_cert.pretty_cert()))
# if there is no parent, then no way to verify the chain
if not self.parent:
if debug_verify_chain:
- logger.debug("verify_chain: NO. %s has no parent and issuer %s is not in %d trusted roots"%\
- (self.pretty_cert(), self.get_issuer(), len(trusted_certs)))
- raise CertMissingParent(self.pretty_cert() + \
- ": Issuer %s is not one of the %d trusted roots, and cert has no parent." %\
- (self.get_issuer(), len(trusted_certs)))
+ logger.debug("verify_chain: NO. {} has no parent "
+ "and issuer {} is not in {} trusted roots"
+ .format(self.pretty_cert(), self.get_issuer(), len(trusted_certs)))
+ raise CertMissingParent("{}: Issuer {} is not one of the {} trusted roots, "
+ "and cert has no parent."
+ .format(self.pretty_cert(), self.get_issuer(), len(trusted_certs)))
# if it wasn't signed by the parent...
if not self.is_signed_by_cert(self.parent):
if debug_verify_chain:
- logger.debug("verify_chain: NO. %s is not signed by parent %s, but by %s"%\
- (self.pretty_cert(),
- self.parent.pretty_cert(),
- self.get_issuer()))
- raise CertNotSignedByParent("%s: Parent %s, issuer %s"\
- % (self.pretty_cert(),
- self.parent.pretty_cert(),
- self.get_issuer()))
+ logger.debug("verify_chain: NO. {} is not signed by parent {}, but by {}"
+ .format(self.pretty_cert(),
+ self.parent.pretty_cert(),
+ self.get_issuer()))
+ raise CertNotSignedByParent("{}: Parent {}, issuer {}"
+ .format(self.pretty_cert(),
+ self.parent.pretty_cert(),
+ self.get_issuer()))
# Confirm that the parent is a CA. Only CAs can be trusted as
# signers.
@@ -798,35 +820,36 @@ class Certificate:
# Ugly - cert objects aren't parsed so we need to read the
# extension and hope there are no other basicConstraints
if not self.parent.isCA and not (self.parent.get_extension('basicConstraints') == 'CA:TRUE'):
- logger.warn("verify_chain: cert %s's parent %s is not a CA" % \
- (self.pretty_cert(), self.parent.pretty_cert()))
- raise CertNotSignedByParent("%s: Parent %s not a CA" % (self.pretty_cert(),
- self.parent.pretty_cert()))
+ logger.warn("verify_chain: cert {}'s parent {} is not a CA"
+ .format(self.pretty_cert(), self.parent.pretty_cert()))
+ raise CertNotSignedByParent("{}: Parent {} not a CA"
+ .format(self.pretty_cert(), self.parent.pretty_cert()))
# if the parent isn't verified...
if debug_verify_chain:
- logger.debug("verify_chain: .. %s, -> verifying parent %s"%\
- (self.pretty_cert(),self.parent.pretty_cert()))
+ logger.debug("verify_chain: .. {}, -> verifying parent {}"
+ .format(self.pretty_cert(),self.parent.pretty_cert()))
self.parent.verify_chain(trusted_certs)
return
### more introspection
def get_extensions(self):
+ import M2Crypto
# pyOpenSSL does not have a way to get extensions
triples = []
- m2x509 = X509.load_cert_string(self.save_to_string())
+ m2x509 = M2Crypto.X509.load_cert_string(self.save_to_string())
nb_extensions = m2x509.get_ext_count()
- logger.debug("X509 had %d extensions"%nb_extensions)
+ logger.debug("X509 had {} extensions".format(nb_extensions))
for i in range(nb_extensions):
- ext=m2x509.get_ext_at(i)
+ ext = m2x509.get_ext_at(i)
triples.append( (ext.get_name(), ext.get_value(), ext.get_critical(),) )
return triples
def get_data_names(self):
return self.data.keys()
- def get_all_datas (self):
+ def get_all_datas(self):
triples = self.get_extensions()
for name in self.get_data_names():
triples.append( (name,self.get_data(name),'data',) )
@@ -836,21 +859,22 @@ class Certificate:
def get_filename(self):
return getattr(self,'filename',None)
- def dump (self, *args, **kwargs):
- print self.dump_string(*args, **kwargs)
+ def dump(self, *args, **kwargs):
+ print(self.dump_string(*args, **kwargs))
- def dump_string (self,show_extensions=False):
+ def dump_string(self, show_extensions=False):
result = ""
- result += "CERTIFICATE for %s\n"%self.pretty_cert()
- result += "Issued by %s\n"%self.get_issuer()
- filename=self.get_filename()
- if filename: result += "Filename %s\n"%filename
+ result += "CERTIFICATE for {}\n".format(self.pretty_cert())
+ result += "Issued by {}\n".format(self.get_issuer())
+ filename = self.get_filename()
+ if filename:
+ result += "Filename {}\n".format(filename)
if show_extensions:
all_datas = self.get_all_datas()
- result += " has %d extensions/data attached"%len(all_datas)
- for (n, v, c) in all_datas:
- if c=='data':
- result += " data: %s=%s\n"%(n,v)
+ result += " has {} extensions/data attached".format(len(all_datas))
+ for n, v, c in all_datas:
+ if c == 'data':
+ result += " data: {}={}\n".format(n, v)
else:
- result += " ext: %s (crit=%s)=<<<%s>>>\n"%(n,c,v)
+ result += " ext: {} (crit={})=<<<{}>>>\n".format(n, c, v)
return result
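The PY3/bytes guards added to certificate.py above deal with the fact that pyOpenSSL's dump_* helpers return bytes under Python 3 while the rest of the code expects text. A minimal standalone illustration of the same pattern (hypothetical function name, not taken from the patch):

    import OpenSSL

    def x509_as_text(x509):
        # dump_certificate returns str on Python 2 but bytes on Python 3
        pem = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, x509)
        if isinstance(pem, bytes):
            pem = pem.decode()
        return pem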
diff --git a/sfa/trust/credential.py b/sfa/trust/credential.py
index 3f658fb..daa8a89 100644
--- a/sfa/trust/credential.py
+++ b/sfa/trust/credential.py
@@ -26,14 +26,16 @@
# Credentials are signed XML files that assign a subject gid privileges to an object gid
##
+from __future__ import print_function
+
import os, os.path
import subprocess
-from types import StringTypes
import datetime
-from StringIO import StringIO
from tempfile import mkstemp
from xml.dom.minidom import Document, parseString
+from sfa.util.py23 import PY3, StringType, StringIO
+
HAVELXML = False
try:
from lxml import etree
@@ -59,13 +61,13 @@ DEFAULT_CREDENTIAL_LIFETIME = 86400 * 31
# . Need to add support for other types of credentials, e.g. tickets
# . add namespaces to signed-credential element?
-signature_template = \
+signature_format = \
'''
-<Signature xml:id="Sig_%s" xmlns="http://www.w3.org/2000/09/xmldsig#">
+<Signature xml:id="Sig_{refid}" xmlns="http://www.w3.org/2000/09/xmldsig#">
<SignedInfo>
<CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"/>
<SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/>
- <Reference URI="#%s">
+ <Reference URI="#{refid}">
<Transforms>
<Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature" />
</Transforms>
@@ -85,35 +87,6 @@ signature_template = \
</Signature>
'''
-# PG formats the template (whitespace) slightly differently.
-# Note that they don't include the xmlns in the template, but add it later.
-# Otherwise the two are equivalent.
-#signature_template_as_in_pg = \
-#'''
-#<Signature xml:id="Sig_%s" >
-# <SignedInfo>
-# <CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"/>
-# <SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/>
-# <Reference URI="#%s">
-# <Transforms>
-# <Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature" />
-# </Transforms>
-# <DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/>
-# <DigestValue></DigestValue>
-# </Reference>
-# </SignedInfo>
-# <SignatureValue />
-# <KeyInfo>
-# <X509Data >
-# <X509SubjectName/>
-# <X509IssuerSerial/>
-# <X509Certificate/>
-# </X509Data>
-# <KeyValue />
-# </KeyInfo>
-#</Signature>
-#'''
-
##
# Convert a string into a bool
# used to convert an xsd:boolean to a Python boolean
@@ -181,21 +154,27 @@ class Signature(object):
self.gid = gid
def decode(self):
+ # Helper function to strip a literal prefix off the front of a string, if present
+ def remove_prefix(text, prefix):
+ if text and prefix and text.startswith(prefix):
+ return text[len(prefix):]
+ return text
+
try:
doc = parseString(self.xml)
- except ExpatError,e:
- logger.log_exc ("Failed to parse credential, %s"%self.xml)
+ except ExpatError as e:
+ logger.log_exc("Failed to parse credential, {}".format(self.xml))
raise
sig = doc.getElementsByTagName("Signature")[0]
## This code until the end of function rewritten by Aaron Helsinger
- ref_id = sig.getAttribute("xml:id").strip().strip("Sig_")
+ ref_id = remove_prefix(sig.getAttribute("xml:id").strip(), "Sig_")
# The xml:id tag is optional, and could be in a
# Reference xml:id or Reference UID sub element instead
if not ref_id or ref_id == '':
reference = sig.getElementsByTagName('Reference')[0]
- ref_id = reference.getAttribute('xml:id').strip().strip('Sig_')
+ ref_id = remove_prefix(reference.getAttribute('xml:id').strip(), "Sig_")
if not ref_id or ref_id == '':
- ref_id = reference.getAttribute('URI').strip().strip('#')
+ ref_id = remove_prefix(reference.getAttribute('URI').strip(), "#")
self.set_refid(ref_id)
keyinfos = sig.getElementsByTagName("X509Data")
gids = None
@@ -205,7 +184,7 @@ class Signature(object):
if len(cert.childNodes) > 0:
szgid = cert.childNodes[0].nodeValue
szgid = szgid.strip()
- szgid = "-----BEGIN CERTIFICATE-----\n%s\n-----END CERTIFICATE-----" % szgid
+ szgid = "-----BEGIN CERTIFICATE-----\n{}\n-----END CERTIFICATE-----".format(szgid)
if gids is None:
gids = szgid
else:
@@ -215,7 +194,7 @@ class Signature(object):
self.set_issuer_gid(GID(string=gids))
def encode(self):
- self.xml = signature_template % (self.get_refid(), self.get_refid())
+ self.xml = signature_format.format(refid=self.get_refid())
##
# A credential provides a caller gid with privileges to an object gid.
@@ -277,7 +256,7 @@ class Credential(object):
self.version = None
if cred:
- if isinstance(cred, StringTypes):
+ if isinstance(cred, StringType):
string = cred
self.type = Credential.SFA_CREDENTIAL_TYPE
self.version = '3'
@@ -290,11 +269,12 @@ class Credential(object):
if string:
str = string
elif filename:
- str = file(filename).read()
+ with open(filename) as infile:
+ str = infile.read()
# if this is a legacy credential, write error and bail out
- if isinstance (str, StringTypes) and str.strip().startswith("-----"):
- logger.error("Legacy credentials not supported any more - giving up with %s..."%str[:10])
+ if isinstance(str, StringType) and str.strip().startswith("-----"):
+ logger.error("Legacy credentials not supported any more - giving up with {}...".format(str[:10]))
return
else:
self.xml = str
@@ -405,11 +385,11 @@ class Credential(object):
# Expiration: an absolute UTC time of expiration (as either an int or string or datetime)
#
def set_expiration(self, expiration):
- expiration_datetime = utcparse (expiration)
+ expiration_datetime = utcparse(expiration)
if expiration_datetime is not None:
self.expiration = expiration_datetime
else:
- logger.error ("unexpected input %s in Credential.set_expiration"%expiration)
+ logger.error("unexpected input {} in Credential.set_expiration".format(expiration))
##
# get the lifetime of the credential (always in datetime format)
@@ -496,7 +476,7 @@ class Credential(object):
append_sub(doc, cred, "target_urn", self.gidObject.get_urn())
append_sub(doc, cred, "uuid", "")
if not self.expiration:
- logger.debug("Creating credential valid for %s s"%DEFAULT_CREDENTIAL_LIFETIME)
+ logger.debug("Creating credential valid for {} s".format(DEFAULT_CREDENTIAL_LIFETIME))
self.set_expiration(datetime.datetime.utcnow() + datetime.timedelta(seconds=DEFAULT_CREDENTIAL_LIFETIME))
self.expiration = self.expiration.replace(microsecond=0)
if self.expiration.tzinfo is not None and self.expiration.tzinfo.utcoffset(self.expiration) is not None:
@@ -557,10 +537,12 @@ class Credential(object):
# Below throws InUse exception if we forgot to clone the attribute first
oldAttr = signed_cred.setAttributeNode(attr.cloneNode(True))
if oldAttr and oldAttr.value != attr.value:
- msg = "Delegating cred from owner %s to %s over %s:\n - Replaced attribute %s value '%s' with '%s'" % \
- (self.parent.gidCaller.get_urn(), self.gidCaller.get_urn(), self.gidObject.get_urn(), oldAttr.name, oldAttr.value, attr.value)
+ msg = "Delegating cred from owner {} to {} over {}:\n"
+ "- Replaced attribute {} value '{}' with '{}'"\
+ .format(self.parent.gidCaller.get_urn(), self.gidCaller.get_urn(),
+ self.gidObject.get_urn(), oldAttr.name, oldAttr.value, attr.value)
logger.warn(msg)
- #raise CredentialNotVerifiable("Can't encode new valid delegated credential: %s" % msg)
+ #raise CredentialNotVerifiable("Can't encode new valid delegated credential: {}".format(msg))
p_cred = doc.importNode(sdoc.getElementsByTagName("credential")[0], True)
p = doc.createElement("parent")
@@ -596,12 +578,16 @@ class Credential(object):
f = filep
else:
f = open(filename, "w")
+ if PY3 and isinstance(self.xml, bytes):
+ self.xml = self.xml.decode()
f.write(self.xml)
f.close()
def save_to_string(self, save_parents=True):
if not self.xml:
self.encode()
+ if PY3 and isinstance(self.xml, bytes):
+ self.xml = self.xml.decode()
return self.xml
def get_refid(self):
@@ -637,7 +623,7 @@ class Credential(object):
rid = self.get_refid()
while rid in refs:
val = int(rid[3:])
- rid = "ref%d" % (val + 1)
+ rid = "ref{}".format(val + 1)
# Set the new refid
self.set_refid(rid)
@@ -690,14 +676,13 @@ class Credential(object):
# Call out to xmlsec1 to sign it
- ref = 'Sig_%s' % self.get_refid()
+ ref = 'Sig_{}'.format(self.get_refid())
filename = self.save_to_random_tmp_file()
xmlsec1 = self.get_xmlsec1_path()
if not xmlsec1:
raise Exception("Could not locate required 'xmlsec1' program")
- command = '%s --sign --node-id "%s" --privkey-pem %s,%s %s' \
- % (xmlsec1, ref, self.issuer_privkey, ",".join(gid_files), filename)
-# print 'command',command
+ command = '{} --sign --node-id "{}" --privkey-pem {},{} {}' \
+ .format(xmlsec1, ref, self.issuer_privkey, ",".join(gid_files), filename)
signed = os.popen(command).read()
os.remove(filename)
@@ -722,7 +707,7 @@ class Credential(object):
doc = None
try:
doc = parseString(self.xml)
- except ExpatError,e:
+ except ExpatError as e:
raise CredentialNotVerifiable("Malformed credential")
doc = parseString(self.xml)
sigs = []
@@ -834,7 +819,8 @@ class Credential(object):
xmlschema = etree.XMLSchema(schema_doc)
if not xmlschema.validate(tree):
error = xmlschema.error_log.last_error
- message = "%s: %s (line %s)" % (self.pretty_cred(), error.message, error.line)
+ message = "{}: {} (line {})".format(self.pretty_cred(),
+ error.message, error.line)
raise CredentialNotVerifiable(message)
if trusted_certs_required and trusted_certs is None:
@@ -852,15 +838,15 @@ class Credential(object):
# or non PEM files
trusted_cert_objects.append(GID(filename=f))
ok_trusted_certs.append(f)
- except Exception, exc:
- logger.error("Failed to load trusted cert from %s: %r"%( f, exc))
+ except Exception as exc:
+ logger.error("Failed to load trusted cert from {}: {}".format(f, exc))
trusted_certs = ok_trusted_certs
# make sure it is not expired
if self.get_expiration() < datetime.datetime.utcnow():
- raise CredentialNotVerifiable("Credential %s expired at %s" % \
- (self.pretty_cred(),
- self.expiration.strftime(SFATIME_FORMAT)))
+ raise CredentialNotVerifiable("Credential {} expired at {}" \
+ .format(self.pretty_cred(),
+ self.expiration.strftime(SFATIME_FORMAT)))
# Verify the signatures
filename = self.save_to_random_tmp_file()
@@ -874,11 +860,11 @@ class Credential(object):
cur_cred.get_gid_caller().verify_chain(trusted_cert_objects)
refs = []
- refs.append("Sig_%s" % self.get_refid())
+ refs.append("Sig_{}".format(self.get_refid()))
parentRefs = self.updateRefID()
for ref in parentRefs:
- refs.append("Sig_%s" % ref)
+ refs.append("Sig_{}".format(ref))
for ref in refs:
# If caller explicitly passed in None that means skip xmlsec1 validation.
@@ -890,7 +876,7 @@ class Credential(object):
# up to fedora20 we used os.popen and checked that the output begins with OK
# turns out, with fedora21, there is extra input before this 'OK' thing
# looks like we're better off just using the exit code - that's what it is made for
- #cert_args = " ".join(['--trusted-pem %s' % x for x in trusted_certs])
+ #cert_args = " ".join(['--trusted-pem {}'.format(x) for x in trusted_certs])
#command = '{} --verify --node-id "{}" {} {} 2>&1'.\
# format(self.xmlsec_path, ref, cert_args, filename)
xmlsec1 = self.get_xmlsec1_path()
@@ -916,8 +902,8 @@ class Credential(object):
mend = verified.find('\\', mstart)
msg = verified[mstart:mend]
logger.warning("Credential.verify - failed - xmlsec1 returned {}".format(verified.strip()))
- raise CredentialNotVerifiable("xmlsec1 error verifying cred %s using Signature ID %s: %s" % \
- (self.pretty_cred(), ref, msg))
+ raise CredentialNotVerifiable("xmlsec1 error verifying cred {} using Signature ID {}: {}"\
+ .format(self.pretty_cred(), ref, msg))
os.remove(filename)
# Verify the parents (delegation)
@@ -954,7 +940,9 @@ class Credential(object):
root_target_gid = root_cred.get_gid_object()
if root_cred.get_signature() is None:
# malformed
- raise CredentialNotVerifiable("Could not verify credential owned by %s for object %s. Cred has no signature" % (self.gidCaller.get_urn(), self.gidObject.get_urn()))
+ raise CredentialNotVerifiable("Could not verify credential owned by {} for object {}. "
+ "Cred has no signature" \
+ .format(self.gidCaller.get_urn(), self.gidObject.get_urn()))
root_cred_signer = root_cred.get_signature().get_issuer_gid()
@@ -1098,7 +1086,7 @@ class Credential(object):
#user_key = Keypair(filename=keyfile)
#user_hrn = self.get_gid_caller().get_hrn()
- subject_string = "%s delegated to %s" % (object_hrn, delegee_hrn)
+ subject_string = "{} delegated to {}".format(object_hrn, delegee_hrn)
dcred = Credential(subject=subject_string)
dcred.set_gid_caller(delegee_gid)
dcred.set_gid_object(object_gid)
@@ -1117,7 +1105,7 @@ class Credential(object):
def get_filename(self):
return getattr(self,'filename',None)
- def actual_caller_hrn (self):
+ def actual_caller_hrn(self):
"""a helper method used by some API calls like e.g. Allocate
to try and find out who really is the original caller
@@ -1139,26 +1127,26 @@ class Credential(object):
# else this looks like a delegated credential, and the real caller is the issuer
else:
actual_caller_hrn=issuer_hrn
- logger.info("actual_caller_hrn: caller_hrn=%s, issuer_hrn=%s, returning %s"
- %(caller_hrn,issuer_hrn,actual_caller_hrn))
+ logger.info("actual_caller_hrn: caller_hrn={}, issuer_hrn={}, returning {}"
+ .format(caller_hrn,issuer_hrn,actual_caller_hrn))
return actual_caller_hrn
##
# Dump the contents of a credential to stdout in human-readable format
#
# @param dump_parents If true, also dump the parent certificates
- def dump (self, *args, **kwargs):
- print self.dump_string(*args, **kwargs)
+ def dump(self, *args, **kwargs):
+ print(self.dump_string(*args, **kwargs))
# SFA code ignores show_xml and disables printing the cred xml
def dump_string(self, dump_parents=False, show_xml=False):
result=""
- result += "CREDENTIAL %s\n" % self.pretty_subject()
+ result += "CREDENTIAL {}\n".format(self.pretty_subject())
filename=self.get_filename()
- if filename: result += "Filename %s\n"%filename
+ if filename: result += "Filename {}\n".format(filename)
privileges = self.get_privileges()
if privileges:
- result += " privs: %s\n" % privileges.save_to_string()
+ result += " privs: {}\n".format(privileges.save_to_string())
else:
result += " privs: \n"
gidCaller = self.get_gid_caller()
@@ -1191,7 +1179,7 @@ class Credential(object):
result += "\nEnd XML\n"
except:
import traceback
- print "exc. Credential.dump_string / XML"
+ print("exc. Credential.dump_string / XML")
traceback.print_exc()
return result
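The remove_prefix helper introduced in Signature.decode above replaces calls like value.strip('Sig_'), which do not strip a literal prefix: str.strip removes any run of the listed characters from both ends. A quick illustration of the difference (example values are made up):

    ref = "Sig_Signer1"
    print(ref.strip("Sig_"))            # 'ner1' -- 'S', 'i', 'g', '_' eaten greedily

    def remove_prefix(text, prefix):
        # mirrors the helper added in Signature.decode
        if text and prefix and text.startswith(prefix):
            return text[len(prefix):]
        return text

    print(remove_prefix(ref, "Sig_"))   # 'Signer1'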
diff --git a/sfa/trust/credential_factory.py b/sfa/trust/credential_factory.py
index 2fe37a7..cf5a8fb 100644
--- a/sfa/trust/credential_factory.py
+++ b/sfa/trust/credential_factory.py
@@ -1,110 +1,112 @@
-#----------------------------------------------------------------------
-# Copyright (c) 2014 Raytheon BBN Technologies
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and/or hardware specification (the "Work") to
-# deal in the Work without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense,
-# and/or sell copies of the Work, and to permit persons to whom the Work
-# is furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Work.
-#
-# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
-# IN THE WORK.
-#----------------------------------------------------------------------
-
-from sfa.util.sfalogging import logger
-from sfa.trust.credential import Credential
-from sfa.trust.abac_credential import ABACCredential
-
-import json
-import re
-
-# Factory for creating credentials of different sorts by type.
-# Specifically, this factory can create standard SFA credentials
-# and ABAC credentials from XML strings based on their identifying content
-
-class CredentialFactory:
-
- UNKNOWN_CREDENTIAL_TYPE = 'geni_unknown'
-
- # Static Credential class method to determine the type of a credential
- # string depending on its contents
- @staticmethod
- def getType(credString):
- credString_nowhitespace = re.sub('\s', '', credString)
- if credString_nowhitespace.find('<type>abac</type>') > -1:
- return ABACCredential.ABAC_CREDENTIAL_TYPE
- elif credString_nowhitespace.find('<type>privilege</type>') > -1:
- return Credential.SFA_CREDENTIAL_TYPE
- else:
- st = credString_nowhitespace.find('<type>')
- end = credString_nowhitespace.find('</type>', st)
- return credString_nowhitespace[st + len('<type>'):end]
-# return CredentialFactory.UNKNOWN_CREDENTIAL_TYPE
-
- # Static Credential class method to create the appropriate credential
- # (SFA or ABAC) depending on its type
- @staticmethod
- def createCred(credString=None, credFile=None):
- if not credString and not credFile:
- raise Exception("CredentialFactory.createCred called with no argument")
- if credFile:
- try:
- credString = open(credFile).read()
- except Exception, e:
- logger.info("Error opening credential file %s: %s" % credFile, e)
- return None
-
- # Try to treat the file as JSON, getting the cred_type from the struct
- try:
- credO = json.loads(credString, encoding='ascii')
- if credO.has_key('geni_value') and credO.has_key('geni_type'):
- cred_type = credO['geni_type']
- credString = credO['geni_value']
- except Exception, e:
- # It wasn't a struct. So the credString is XML. Pull the type directly from the string
- logger.debug("Credential string not JSON: %s" % e)
- cred_type = CredentialFactory.getType(credString)
-
- if cred_type == Credential.SFA_CREDENTIAL_TYPE:
- try:
- cred = Credential(string=credString)
- return cred
- except Exception, e:
- if credFile:
- msg = "credString started: %s" % credString[:50]
- raise Exception("%s not a parsable SFA credential: %s. " % (credFile, e) + msg)
- else:
- raise Exception("SFA Credential not parsable: %s. Cred start: %s..." % (e, credString[:50]))
-
- elif cred_type == ABACCredential.ABAC_CREDENTIAL_TYPE:
- try:
- cred = ABACCredential(string=credString)
- return cred
- except Exception, e:
- if credFile:
- raise Exception("%s not a parsable ABAC credential: %s" % (credFile, e))
- else:
- raise Exception("ABAC Credential not parsable: %s. Cred start: %s..." % (e, credString[:50]))
- else:
- raise Exception("Unknown credential type '%s'" % cred_type)
-
-if __name__ == "__main__":
- c2 = open('/tmp/sfa.xml').read()
- cred1 = CredentialFactory.createCred(credFile='/tmp/cred.xml')
- cred2 = CredentialFactory.createCred(credString=c2)
-
- print "C1 = %s" % cred1
- print "C2 = %s" % cred2
- c1s = cred1.dump_string()
- print "C1 = %s" % c1s
-# print "C2 = %s" % cred2.dump_string()
+#----------------------------------------------------------------------
+# Copyright (c) 2014 Raytheon BBN Technologies
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and/or hardware specification (the "Work") to
+# deal in the Work without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Work, and to permit persons to whom the Work
+# is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Work.
+#
+# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
+# IN THE WORK.
+#----------------------------------------------------------------------
+
+from __future__ import print_function
+
+from sfa.util.sfalogging import logger
+from sfa.trust.credential import Credential
+from sfa.trust.abac_credential import ABACCredential
+
+import json
+import re
+
+# Factory for creating credentials of different sorts by type.
+# Specifically, this factory can create standard SFA credentials
+# and ABAC credentials from XML strings based on their identifying content
+
+class CredentialFactory:
+
+ UNKNOWN_CREDENTIAL_TYPE = 'geni_unknown'
+
+ # Static Credential class method to determine the type of a credential
+ # string depending on its contents
+ @staticmethod
+ def getType(credString):
+ credString_nowhitespace = re.sub('\s', '', credString)
+ if credString_nowhitespace.find('<type>abac</type>') > -1:
+ return ABACCredential.ABAC_CREDENTIAL_TYPE
+ elif credString_nowhitespace.find('<type>privilege</type>') > -1:
+ return Credential.SFA_CREDENTIAL_TYPE
+ else:
+ st = credString_nowhitespace.find('<type>')
+ end = credString_nowhitespace.find('</type>', st)
+ return credString_nowhitespace[st + len('<type>'):end]
+# return CredentialFactory.UNKNOWN_CREDENTIAL_TYPE
+
+ # Static Credential class method to create the appropriate credential
+ # (SFA or ABAC) depending on its type
+ @staticmethod
+ def createCred(credString=None, credFile=None):
+ if not credString and not credFile:
+ raise Exception("CredentialFactory.createCred called with no argument")
+ if credFile:
+ try:
+ credString = open(credFile).read()
+ except Exception as e:
+ logger.info("Error opening credential file %s: %s" % credFile, e)
+ return None
+
+ # Try to treat the file as JSON, getting the cred_type from the struct
+ try:
+ credO = json.loads(credString, encoding='ascii')
+ if 'geni_value' in credO and 'geni_type' in credO:
+ cred_type = credO['geni_type']
+ credString = credO['geni_value']
+ except Exception as e:
+ # It wasn't a struct. So the credString is XML. Pull the type directly from the string
+ logger.debug("Credential string not JSON: %s" % e)
+ cred_type = CredentialFactory.getType(credString)
+
+ if cred_type == Credential.SFA_CREDENTIAL_TYPE:
+ try:
+ cred = Credential(string=credString)
+ return cred
+ except Exception as e:
+ if credFile:
+ msg = "credString started: %s" % credString[:50]
+ raise Exception("%s not a parsable SFA credential: %s. " % (credFile, e) + msg)
+ else:
+ raise Exception("SFA Credential not parsable: %s. Cred start: %s..." % (e, credString[:50]))
+
+ elif cred_type == ABACCredential.ABAC_CREDENTIAL_TYPE:
+ try:
+ cred = ABACCredential(string=credString)
+ return cred
+ except Exception as e:
+ if credFile:
+ raise Exception("%s not a parsable ABAC credential: %s" % (credFile, e))
+ else:
+ raise Exception("ABAC Credential not parsable: %s. Cred start: %s..." % (e, credString[:50]))
+ else:
+ raise Exception("Unknown credential type '%s'" % cred_type)
+
+if __name__ == "__main__":
+ c2 = open('/tmp/sfa.xml').read()
+ cred1 = CredentialFactory.createCred(credFile='/tmp/cred.xml')
+ cred2 = CredentialFactory.createCred(credString=c2)
+
+ print("C1 = %s" % cred1)
+ print("C2 = %s" % cred2)
+ c1s = cred1.dump_string()
+ print("C1 = %s" % c1s)
+# print "C2 = %s" % cred2.dump_string()
diff --git a/sfa/trust/gid.py b/sfa/trust/gid.py
index 6e668fa..3f903d9 100644
--- a/sfa/trust/gid.py
+++ b/sfa/trust/gid.py
@@ -25,14 +25,16 @@
# descendant of the certificate class.
##
-import xmlrpclib
+from __future__ import print_function
+
import uuid
from sfa.trust.certificate import Certificate
from sfa.util.faults import GidInvalidParentHrn, GidParentHrn
-from sfa.util.sfalogging import logger
from sfa.util.xrn import hrn_to_urn, urn_to_hrn, hrn_authfor_hrn
+from sfa.util.sfalogging import logger
+from sfa.util.py23 import xmlrpc_client
##
# Create a new uuid. Returns the UUID as a string.
@@ -174,7 +176,7 @@ class GID(Certificate):
dict = {}
if data:
if data.lower().startswith('uri:http://<params>'):
- dict = xmlrpclib.loads(data[11:])[0][0]
+ dict = xmlrpc_client.loads(data[11:])[0][0]
else:
spl = data.split(', ')
for val in spl:
@@ -201,7 +203,7 @@ class GID(Certificate):
# @param dump_parents If true, also dump the parents of the GID
def dump(self, *args, **kwargs):
- print self.dump_string(*args,**kwargs)
+ print(self.dump_string(*args,**kwargs))
def dump_string(self, indent=0, dump_parents=False):
result=" "*(indent-2) + "GID\n"
diff --git a/sfa/trust/hierarchy.py b/sfa/trust/hierarchy.py
index 43b318e..5e76dbf 100644
--- a/sfa/trust/hierarchy.py
+++ b/sfa/trust/hierarchy.py
@@ -160,7 +160,8 @@ class Hierarchy:
try:
os.makedirs(directory)
# if the path already exists then pass
- except OSError, (errno, strerr):
+ except OSError as xxx_todo_changeme:
+ (errno, strerr) = xxx_todo_changeme.args
if errno == 17:
pass
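The xxx_todo_changeme construct above is the mechanical 2to3 translation of the old tuple-unpacking except clause; it works, but a more conventional py2/py3 form of the same intent (an assumption based on the errno == 17 check, i.e. ignore "directory already exists") would be:

    import errno
    import os

    def makedirs_if_needed(directory):
        try:
            os.makedirs(directory)
        except OSError as e:
            # 17 == errno.EEXIST: the path already exists, which is fine
            if e.errno != errno.EEXIST:
                raise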
diff --git a/sfa/trust/sfaticket.py b/sfa/trust/sfaticket.py
index bc8584b..6d4a009 100644
--- a/sfa/trust/sfaticket.py
+++ b/sfa/trust/sfaticket.py
@@ -24,11 +24,13 @@
# implements SFA tickets
#
-import xmlrpclib
+from __future__ import print_function
from sfa.trust.certificate import Certificate
from sfa.trust.gid import GID
+from sfa.util.py23 import xmlrpc_client
+
# Ticket is tuple:
# (gidCaller, gidObject, attributes, rspec, delegate)
#
@@ -97,13 +99,13 @@ class SfaTicket(Certificate):
dict["gidCaller"] = self.gidCaller.save_to_string(save_parents=True)
if self.gidObject:
dict["gidObject"] = self.gidObject.save_to_string(save_parents=True)
- str = "URI:" + xmlrpclib.dumps((dict,), allow_none=True)
+ str = "URI:" + xmlrpc_client.dumps((dict,), allow_none=True)
self.set_data(str)
def decode(self):
data = self.get_data()
if data:
- dict = xmlrpclib.loads(self.get_data()[4:])[0][0]
+ dict = xmlrpc_client.loads(self.get_data()[4:])[0][0]
else:
dict = {}
@@ -124,25 +126,25 @@ class SfaTicket(Certificate):
self.gidObject = None
def dump(self, dump_parents=False):
- print "TICKET", self.get_subject()
+ print("TICKET", self.get_subject())
- print " gidCaller:"
+ print(" gidCaller:")
gidCaller = self.get_gid_caller()
if gidCaller:
gidCaller.dump(8, dump_parents)
- print " gidObject:"
+ print(" gidObject:")
gidObject = self.get_gid_object()
if gidObject:
gidObject.dump(8, dump_parents)
- print " attributes:"
+ print(" attributes:")
for attrname in self.get_attributes().keys():
- print " ", attrname, self.get_attributes()[attrname]
+ print(" ", attrname, self.get_attributes()[attrname])
- print " rspec:"
- print " ", self.get_rspec()
+ print(" rspec:")
+ print(" ", self.get_rspec())
if self.parent and dump_parents:
- print "PARENT",
+ print("PARENT", end=' ')
self.parent.dump(dump_parents)
diff --git a/sfa/trust/speaksfor_util.py b/sfa/trust/speaksfor_util.py
index 9bad61d..640d512 100644
--- a/sfa/trust/speaksfor_util.py
+++ b/sfa/trust/speaksfor_util.py
@@ -31,16 +31,16 @@ import subprocess
import sys
import tempfile
from xml.dom.minidom import *
-from StringIO import StringIO
from sfa.util.sfatime import SFATIME_FORMAT
from sfa.trust.certificate import Certificate
-from sfa.trust.credential import Credential, signature_template, HAVELXML
+from sfa.trust.credential import Credential, signature_format, HAVELXML
from sfa.trust.abac_credential import ABACCredential, ABACElement
from sfa.trust.credential_factory import CredentialFactory
from sfa.trust.gid import GID
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringIO
# Routine to validate that a speaks-for credential
# says what it claims to say:
@@ -56,7 +56,8 @@ from sfa.util.sfalogging import logger
# Find the text associated with first child text node
def findTextChildValue(root):
child = findChildNamed(root, '#text')
- if child: return str(child.nodeValue)
+ if child:
+ return str(child.nodeValue)
return None
# Find first child with given name
@@ -85,7 +86,7 @@ def run_subprocess(cmd, stdout, stderr):
output = proc.returncode
return output
except Exception as e:
- raise Exception("Failed call to subprocess '%s': %s" % (" ".join(cmd), e))
+ raise Exception("Failed call to subprocess '{}': {}".format(" ".join(cmd), e))
def get_cert_keyid(gid):
"""Extract the subject key identifier from the given certificate.
@@ -134,26 +135,28 @@ def verify_speaks_for(cred, tool_gid, speaking_for_urn,
# Credential has not expired
if cred.expiration and cred.expiration < datetime.datetime.utcnow():
- return False, None, "ABAC Credential expired at %s (%s)" % (cred.expiration.strftime(SFATIME_FORMAT), cred.pretty_cred())
+ return False, None, "ABAC Credential expired at {} ({})"\
+ .format(cred.expiration.strftime(SFATIME_FORMAT), cred.pretty_cred())
# Must be ABAC
if cred.get_cred_type() != ABACCredential.ABAC_CREDENTIAL_TYPE:
- return False, None, "Credential not of type ABAC but %s" % cred.get_cred_type
+ return False, None, "Credential not of type ABAC but {}".format(cred.get_cred_type)
if cred.signature is None or cred.signature.gid is None:
- return False, None, "Credential malformed: missing signature or signer cert. Cred: %s" % cred.pretty_cred()
+ return False, None, "Credential malformed: missing signature or signer cert. Cred: {}"\
+ .format(cred.pretty_cred())
user_gid = cred.signature.gid
user_urn = user_gid.get_urn()
# URN of signer from cert must match URN of 'speaking-for' argument
if user_urn != speaking_for_urn:
- return False, None, "User URN from cred doesn't match speaking_for URN: %s != %s (cred %s)" % \
- (user_urn, speaking_for_urn, cred.pretty_cred())
+ return False, None, "User URN from cred doesn't match speaking_for URN: {} != {} (cred {})"\
+ .format(user_urn, speaking_for_urn, cred.pretty_cred())
tails = cred.get_tails()
if len(tails) != 1:
- return False, None, "Invalid ABAC-SF credential: Need exactly 1 tail element, got %d (%s)" % \
- (len(tails), cred.pretty_cred())
+ return False, None, "Invalid ABAC-SF credential: Need exactly 1 tail element, got {} ({})"\
+ .format(len(tails), cred.pretty_cred())
user_keyid = get_cert_keyid(user_gid)
tool_keyid = get_cert_keyid(tool_gid)
@@ -188,13 +191,14 @@ def verify_speaks_for(cred, tool_gid, speaking_for_urn,
msg = verified[mstart:mend]
if msg == "":
msg = output
- return False, None, "ABAC credential failed to xmlsec1 verify: %s" % msg
+ return False, None, "ABAC credential failed to xmlsec1 verify: {}".format(msg)
# Must say U.speaks_for(U)<-T
if user_keyid != principal_keyid or \
tool_keyid != subject_keyid or \
- role != ('speaks_for_%s' % user_keyid):
- return False, None, "ABAC statement doesn't assert U.speaks_for(U)<-T (%s)" % cred.pretty_cred()
+ role != ('speaks_for_{}'.format(user_keyid)):
+ return False, None, "ABAC statement doesn't assert U.speaks_for(U)<-T ({})"\
+ .format(cred.pretty_cred())
# If schema provided, validate against schema
if HAVELXML and schema and os.path.exists(schema):
@@ -204,23 +208,22 @@ def verify_speaks_for(cred, tool_gid, speaking_for_urn,
xmlschema = etree.XMLSchema(schema_doc)
if not xmlschema.validate(tree):
error = xmlschema.error_log.last_error
- message = "%s: %s (line %s)" % (cred.pretty_cred(), error.message, error.line)
- return False, None, ("XML Credential schema invalid: %s" % message)
+ message = "{}: {} (line {})".format(cred.pretty_cred(), error.message, error.line)
+ return False, None, ("XML Credential schema invalid: {}".format(message))
if trusted_roots:
# User certificate must validate against trusted roots
try:
user_gid.verify_chain(trusted_roots)
- except Exception, e:
+ except Exception as e:
return False, None, \
- "Cred signer (user) cert not trusted: %s" % e
+ "Cred signer (user) cert not trusted: {}".format(e)
# Tool certificate must validate against trusted roots
try:
tool_gid.verify_chain(trusted_roots)
- except Exception, e:
- return False, None, \
- "Tool cert not trusted: %s" % e
+ except Exception as e:
+ return False, None, "Tool cert not trusted: {}".format(e)
return True, user_gid, ""
@@ -258,9 +261,9 @@ def determine_speaks_for(logger, credentials, caller_gid, speaking_for_xrn, trus
if not isinstance(cred_value, ABACCredential):
cred = CredentialFactory.createCred(cred_value)
-# print("Got a cred to check speaksfor for: %s" % cred.pretty_cred())
+# print("Got a cred to check speaksfor for: {}".format(cred.pretty_cred()))
# #cred.dump(True, True)
-# print("Caller: %s" % caller_gid.dump_string(2, True))
+# print("Caller: {}".format(caller_gid.dump_string(2, True)))
# See if this is a valid speaks_for
is_valid_speaks_for, user_gid, msg = \
verify_speaks_for(cred,
@@ -270,7 +273,8 @@ def determine_speaks_for(logger, credentials, caller_gid, speaking_for_xrn, trus
if is_valid_speaks_for:
return user_gid # speaks-for
else:
- logger.info("Got speaks-for option but not a valid speaks_for with this credential: %s" % msg)
+ logger.info("Got speaks-for option but not a valid speaks_for with this credential: {}"
+ .format(msg))
return caller_gid # Not speaks-for
# Create an ABAC Speaks For credential using the ABACCredential object and it's encode&sign methods
@@ -294,7 +298,7 @@ def create_sign_abaccred(tool_gid, user_gid, ma_gid, user_key_file, cred_filenam
user_urn = user_gid.get_urn()
user_keyid = get_cert_keyid(user_gid)
tool_keyid = get_cert_keyid(tool_gid)
- cred.head = ABACElement(user_keyid, user_urn, "speaks_for_%s" % user_keyid)
+ cred.head = ABACElement(user_keyid, user_urn, "speaks_for_{}".format(user_keyid))
cred.tails.append(ABACElement(tool_keyid, tool_urn))
cred.set_expiration(datetime.datetime.utcnow() + datetime.timedelta(days=dur_days))
cred.expiration = cred.expiration.replace(microsecond=0)
@@ -306,8 +310,8 @@ def create_sign_abaccred(tool_gid, user_gid, ma_gid, user_key_file, cred_filenam
cred.sign()
# Save it
cred.save_to_file(cred_filename)
- logger.info("Created ABAC credential: '%s' in file %s" %
- (cred.pretty_cred(), cred_filename))
+ logger.info("Created ABAC credential: '{}' in file {}"
+ .format(cred.pretty_cred(), cred_filename))
# FIXME: Assumes signer is itself signed by an 'ma_gid' that can be trusted
def create_speaks_for(tool_gid, user_gid, ma_gid,
@@ -315,41 +319,37 @@ def create_speaks_for(tool_gid, user_gid, ma_gid,
tool_urn = tool_gid.get_urn()
user_urn = user_gid.get_urn()
- header = '<?xml version="1.0" encoding="UTF-8"?>'
- reference = "ref0"
- signature_block = \
- '<signatures>\n' + \
- signature_template + \
- '</signatures>'
- template = header + '\n' + \
- '<signed-credential '
- template += 'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.geni.net/resources/credential/2/credential.xsd" xsi:schemaLocation="http://www.protogeni.net/resources/credential/ext/policy/1 http://www.protogeni.net/resources/credential/ext/policy/1/policy.xsd"'
- template += '>\n' + \
- '<credential xml:id="%s">\n' + \
- '<type>abac</type>\n' + \
- '<serial/>\n' +\
- '<owner_gid/>\n' + \
- '<owner_urn/>\n' + \
- '<target_gid/>\n' + \
- '<target_urn/>\n' + \
- '<uuid/>\n' + \
- '<expires>%s</expires>' +\
- '<abac>\n' + \
- '<rt0>\n' + \
- '<version>%s</version>\n' + \
- '<head>\n' + \
- '<ABACprincipal><keyid>%s</keyid><mnemonic>%s</mnemonic></ABACprincipal>\n' +\
- '<role>speaks_for_%s</role>\n' + \
- '</head>\n' + \
- '<tail>\n' +\
- '<ABACprincipal><keyid>%s</keyid><mnemonic>%s</mnemonic></ABACprincipal>\n' +\
- '</tail>\n' +\
- '</rt0>\n' + \
- '</abac>\n' + \
- '</credential>\n' + \
- signature_block + \
- '</signed-credential>\n'
-
+ refid = "ref0"
+
+ credential_format = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<signed-credential xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.geni.net/resources/credential/2/credential.xsd" xsi:schemaLocation="http://www.protogeni.net/resources/credential/ext/policy/1 http://www.protogeni.net/resources/credential/ext/policy/1/policy.xsd">
+ <credential xml:id="{refid}">
+ <type>abac</type>
+ <serial/>
+ <owner_gid/>
+ <owner_urn/>
+ <target_gid/>
+ <target_urn/>
+ <uuid/>
+ <expires>{expiration_str}</expires>
+ <abac>
+ <rt0>
+ <version>{version}</version>
+ <head>
+ <ABACprincipal><keyid>{user_keyid}</keyid><mnemonic>{user_urn}</mnemonic></ABACprincipal>
+ <role>speaks_for_{user_keyid}</role>
+ </head>
+ <tail>
+ <ABACprincipal><keyid>{tool_keyid}</keyid><mnemonic>{tool_urn}</mnemonic></ABACprincipal>
+ </tail>
+ </rt0>
+ </abac>
+ </credential>
+ <signatures>""" + signature_format + """\
+ </signatures>
+</signed-credential>\
+"""
credential_duration = datetime.timedelta(days=dur_days)
expiration = datetime.datetime.utcnow() + credential_duration
@@ -358,16 +358,15 @@ def create_speaks_for(tool_gid, user_gid, ma_gid,
user_keyid = get_cert_keyid(user_gid)
tool_keyid = get_cert_keyid(tool_gid)
- unsigned_cred = template % (reference, expiration_str, version,
- user_keyid, user_urn, user_keyid, tool_keyid, tool_urn,
- reference, reference)
+ # apply the format - itself uses signature_format which uses 'refid'
+ unsigned_cred = credential_format.format(**locals())
unsigned_cred_filename = write_to_tempfile(unsigned_cred)
# Now sign the file with xmlsec1
# xmlsec1 --sign --privkey-pem privkey.pem,cert.pem
# --output signed.xml tosign.xml
- pems = "%s,%s,%s" % (user_key_file, user_gid.get_filename(),
- ma_gid.get_filename())
+ pems = "{},{},{}".format(user_key_file, user_gid.get_filename(),
+ ma_gid.get_filename())
xmlsec1 = Credential.get_xmlsec1_path()
if not xmlsec1:
raise Exception("Could not locate required 'xmlsec1' program")
@@ -379,8 +378,8 @@ def create_speaks_for(tool_gid, user_gid, ma_gid,
if sign_proc_output == None:
logger.info("xmlsec1 returns empty output")
else:
- logger.info("Created ABAC credential: '%s speaks_for %s' in file %s" %
- (tool_urn, user_urn, cred_filename))
+ logger.info("Created ABAC credential: '{} speaks_for {}' in file {}"
+ .format(tool_urn, user_urn, cred_filename))
os.unlink(unsigned_cred_filename)
@@ -451,5 +450,5 @@ if __name__ == "__main__":
trusted_roots)
- print('SPEAKS_FOR = %s' % (gid != tool_gid))
- print("CERT URN = %s" % gid.get_urn())
+ print('SPEAKS_FOR = {}'.format(gid != tool_gid))
+ print("CERT URN = {}".format(gid.get_urn()))
diff --git a/sfa/util/cache.py b/sfa/util/cache.py
index ee4716c..75b22e4 100644
--- a/sfa/util/cache.py
+++ b/sfa/util/cache.py
@@ -75,7 +75,7 @@ class Cache:
def add(self, key, value, ttl = DEFAULT_CACHE_TTL):
with self.lock:
- if self.cache.has_key(key):
+ if key in self.cache:
self.cache[key].set_data(value, ttl=ttl)
else:
self.cache[key] = CacheData(value, ttl)
@@ -108,9 +108,9 @@ class Cache:
return self.__str()
def save_to_file(self, filename):
- f = open(filename, 'w')
+ f = open(filename, 'wb')
pickle.dump(self.cache, f)
def load_from_file(self, filename):
- f = open(filename, 'r')
+ f = open(filename, 'rb')
self.cache = pickle.load(f)
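
The cache.py hunk switches the pickle files from text to binary mode because pickle reads and writes bytes under Python 3, where 'w'/'r' would fail. A minimal standalone sketch of the pattern (the file name is hypothetical):

    import pickle

    data = {'node1': 'up'}
    with open('/tmp/cache.pkl', 'wb') as f:   # text mode would make pickle.dump() fail on Python 3
        pickle.dump(data, f)
    with open('/tmp/cache.pkl', 'rb') as f:
        assert pickle.load(f) == data
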
diff --git a/sfa/util/callids.py b/sfa/util/callids.py
index ead60bb..6748546 100644
--- a/sfa/util/callids.py
+++ b/sfa/util/callids.py
@@ -44,7 +44,7 @@ class _call_ids_impl (dict):
logger.warning("_call_ids_impl.should_handle_call_id: could not acquire lock")
return False
# we're good to go
- if self.has_key(call_id):
+ if call_id in self:
self._purge()
self._lock.release()
return True
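
The has_key calls here and in the other hunks become membership tests, since dict.has_key was removed in Python 3 while the in operator works on both interpreters. Illustrative sketch:

    status = {'node1': 'up'}
    # status.has_key('node1')   # Python-2-only, gone in Python 3
    if 'node1' in status:       # equivalent, works everywhere
        status['node1'] = 'down'
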
diff --git a/sfa/util/config.py b/sfa/util/config.py
index 797bed7..e5435a7 100644
--- a/sfa/util/config.py
+++ b/sfa/util/config.py
@@ -2,11 +2,11 @@
import sys
import os
import time
-import ConfigParser
import tempfile
import codecs
-from StringIO import StringIO
from sfa.util.xml import XML
+from sfa.util.py23 import StringIO
+from sfa.util.py23 import ConfigParser
default_config = \
"""
diff --git a/sfa/util/faults.py b/sfa/util/faults.py
index 4a614b1..702b685 100644
--- a/sfa/util/faults.py
+++ b/sfa/util/faults.py
@@ -24,14 +24,14 @@
# SFA API faults
#
-import xmlrpclib
from sfa.util.genicode import GENICODE
+from sfa.util.py23 import xmlrpc_client
-class SfaFault(xmlrpclib.Fault):
+class SfaFault(xmlrpc_client.Fault):
def __init__(self, faultCode, faultString, extra = None):
if extra:
faultString += ": " + str(extra)
- xmlrpclib.Fault.__init__(self, faultCode, faultString)
+ xmlrpc_client.Fault.__init__(self, faultCode, faultString)
class Forbidden(SfaFault):
def __init__(self, extra = None):
@@ -272,10 +272,10 @@ class SliverDoesNotExist(SfaFault):
def __str__(self):
return repr(self.value)
-class BadRequestHash(xmlrpclib.Fault):
+class BadRequestHash(xmlrpc_client.Fault):
def __init__(self, hash = None, extra = None):
faultString = "bad request hash: " + str(hash)
- xmlrpclib.Fault.__init__(self, GENICODE.ERROR, faultString)
+ xmlrpc_client.Fault.__init__(self, GENICODE.ERROR, faultString)
class MissingTrustedRoots(SfaFault):
def __init__(self, value, extra = None):
diff --git a/sfa/util/method.py b/sfa/util/method.py
index e7bdee7..009220c 100644
--- a/sfa/util/method.py
+++ b/sfa/util/method.py
@@ -4,10 +4,11 @@
#
import time
-from types import IntType, LongType, StringTypes
+from types import IntType, LongType
import textwrap
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringType
from sfa.util.faults import SfaFault, SfaInvalidAPIMethod, SfaInvalidArgumentCount, SfaInvalidArgument
from sfa.storage.parameter import Parameter, Mixed, python_type, xmlrpc_type
@@ -203,7 +204,7 @@ class Method:
try:
self.type_check(name, value, item, args)
return
- except SfaInvalidArgument, fault:
+ except SfaInvalidArgument as fault:
pass
raise fault
@@ -233,7 +234,7 @@ class Method:
# Strings are a special case. Accept either unicode or str
# types if a string is expected.
- if expected_type in StringTypes and isinstance(value, StringTypes):
+ if issubclass(expected_type, StringType) and isinstance(value, StringType):
pass
# Integers and long integers are also special types. Accept
@@ -247,23 +248,23 @@ class Method:
name)
# If a minimum or maximum (length, value) has been specified
- if expected_type in StringTypes:
+ if issubclass(expected_type, StringType):
if min is not None and \
len(value.encode(self.api.encoding)) < min:
- raise SfaInvalidArgument, "%s must be at least %d bytes long" % (name, min)
+ raise SfaInvalidArgument("%s must be at least %d bytes long" % (name, min))
if max is not None and \
len(value.encode(self.api.encoding)) > max:
- raise SfaInvalidArgument, "%s must be at most %d bytes long" % (name, max)
+ raise SfaInvalidArgument("%s must be at most %d bytes long" % (name, max))
elif expected_type in (list, tuple, set):
if min is not None and len(value) < min:
- raise SfaInvalidArgument, "%s must contain at least %d items" % (name, min)
+ raise SfaInvalidArgument("%s must contain at least %d items" % (name, min))
if max is not None and len(value) > max:
- raise SfaInvalidArgument, "%s must contain at most %d items" % (name, max)
+ raise SfaInvalidArgument("%s must contain at most %d items" % (name, max))
else:
if min is not None and value < min:
- raise SfaInvalidArgument, "%s must be > %s" % (name, str(min))
+ raise SfaInvalidArgument("%s must be > %s" % (name, str(min)))
if max is not None and value > max:
- raise SfaInvalidArgument, "%s must be < %s" % (name, str(max))
+ raise SfaInvalidArgument("%s must be < %s" % (name, str(max)))
# If a list with particular types of items is expected
if isinstance(expected, (list, tuple, set)):
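
The type_check hunks in method.py (and the matching sfatables/commands/moo.py ones further down) replace membership in the old StringTypes tuple with an issubclass test against the single StringType alias: on Python 2 both str and unicode derive from basestring, so the check keeps its meaning, and on Python 3 it reduces to str. A rough standalone sketch of the pattern, assuming the py23 alias introduced below:

    from sfa.util.py23 import StringType   # basestring on py2, str on py3

    def expects_string(expected_type, value):
        # replaces: expected_type in StringTypes and isinstance(value, StringTypes)
        return issubclass(expected_type, StringType) and isinstance(value, StringType)

    expects_string(str, "hrn")   # True on both interpreters
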
diff --git a/sfa/util/prefixTree.py b/sfa/util/prefixTree.py
index 93b0c5c..0d7a557 100755
--- a/sfa/util/prefixTree.py
+++ b/sfa/util/prefixTree.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
class prefixNode:
def __init__(self, prefix):
@@ -88,10 +90,10 @@ class prefixTree:
"""
if not node:
node = self.root
- print node.prefix
+ print(node.prefix)
for child in node.children:
- print child.prefix,
+ print(child.prefix, end=' ')
for child in node.children:
self.dump(child)
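
prefixTree.py (and sfalogging.py below) gain "from __future__ import print_function" so the same print() calls run under both interpreters; the Python-2 trailing-comma form becomes the end=' ' keyword. Minimal sketch:

    from __future__ import print_function

    prefixes = ['plc', 'ple', 'nitos']
    for p in prefixes:
        print(p, end=' ')   # replaces the Python-2-only "print p," trailing-comma form
    print()
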
diff --git a/sfa/util/py23.py b/sfa/util/py23.py
new file mode 100644
index 0000000..d26ebd1
--- /dev/null
+++ b/sfa/util/py23.py
@@ -0,0 +1,28 @@
+# our own simplistic replacement for six
+import sys
+PY3 = sys.version_info[0] == 3
+
+try:
+ StringType = basestring
+except:
+ StringType = str
+
+try:
+ from StringIO import StringIO
+except:
+ from io import StringIO
+
+try:
+ import xmlrpclib as xmlrpc_client
+except:
+ from xmlrpc import client as xmlrpc_client
+
+try:
+ import httplib as http_client
+except:
+ from http import client as http_client
+
+try:
+ import ConfigParser
+except:
+ import configparser as ConfigParser
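
The new sfa/util/py23.py centralizes the Python-2/3 compatibility aliases so callers import one name regardless of interpreter version. A minimal sketch of the intended usage (illustrative only):

    from sfa.util.py23 import StringType, StringIO, xmlrpc_client

    def looks_like_text(value):
        return isinstance(value, StringType)   # basestring on py2, str on py3

    buf = StringIO()
    buf.write("hello")
    fault = xmlrpc_client.Fault(42, "example fault")   # xmlrpclib on py2, xmlrpc.client on py3
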
diff --git a/sfa/util/sfalogging.py b/sfa/util/sfalogging.py
index 361a243..2b73b48 100644
--- a/sfa/util/sfalogging.py
+++ b/sfa/util/sfalogging.py
@@ -23,6 +23,8 @@
# IN THE WORK.
#----------------------------------------------------------------------
+from __future__ import print_function
+
import os, sys
import traceback
import logging, logging.handlers
@@ -183,14 +185,14 @@ def profile(logger):
if __name__ == '__main__':
- print 'testing sfalogging into logger.log'
+ print('testing sfalogging into logger.log')
logger1=_SfaLogger('logger.log', loggername='std(info)')
logger2=_SfaLogger('logger.log', loggername='error', level=logging.ERROR)
logger3=_SfaLogger('logger.log', loggername='debug', level=logging.DEBUG)
for (logger,msg) in [ (logger1,"std(info)"),(logger2,"error"),(logger3,"debug")]:
- print "====================",msg, logger.logger.handlers
+ print("====================",msg, logger.logger.handlers)
logger.enable_console()
logger.critical("logger.critical")
diff --git a/sfa/util/sfatime.py b/sfa/util/sfatime.py
index 75a2e4a..7435667 100644
--- a/sfa/util/sfatime.py
+++ b/sfa/util/sfatime.py
@@ -20,7 +20,8 @@
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
-from types import StringTypes
+from __future__ import print_function
+
import time
import datetime
import dateutil.parser
@@ -28,6 +29,7 @@ import calendar
import re
from sfa.util.sfalogging import logger
+from sfa.util.py23 import StringType
SFATIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
@@ -53,7 +55,7 @@ For safety this can also handle inputs that are either timestamps, or datetimes
# prepare the input for the checks below by
# casting strings ('1327098335') to ints
- if isinstance(input, StringTypes):
+ if isinstance(input, StringType):
try:
input = int(input)
except ValueError:
@@ -68,7 +70,7 @@ For safety this can also handle inputs that are either timestamps, or datetimes
if isinstance (input, datetime.datetime):
#logger.info ("argument to utcparse already a datetime - doing nothing")
return input
- elif isinstance (input, StringTypes):
+ elif isinstance (input, StringType):
t = dateutil.parser.parse(input)
if t.utcoffset() is not None:
t = t.utcoffset() + t.replace(tzinfo=None)
@@ -98,9 +100,9 @@ def add_datetime(input, days=0, hours=0, minutes=0, seconds=0):
if __name__ == '__main__':
# checking consistency
- print 20*'X'
- print ("Should be close to zero: %s"%(datetime_to_epoch(datetime.datetime.utcnow())-time.time()))
- print 20*'X'
+ print(20*'X')
+ print(("Should be close to zero: %s"%(datetime_to_epoch(datetime.datetime.utcnow())-time.time())))
+ print(20*'X')
for input in [
'+2d',
'+3w',
@@ -112,4 +114,4 @@ if __name__ == '__main__':
'2014-05-28T15:18',
'2014-05-28T15:18:30',
]:
- print "input=%20s -> parsed %s"%(input,datetime_to_string(utcparse(input)))
+ print("input=%20s -> parsed %s"%(input,datetime_to_string(utcparse(input))))
diff --git a/sfa/util/storage.py b/sfa/util/storage.py
index 89a2509..9033434 100644
--- a/sfa/util/storage.py
+++ b/sfa/util/storage.py
@@ -19,8 +19,8 @@ class SimpleStorage(dict):
db_file = open(self.db_filename, 'r')
dict.__init__(self, eval(db_file.read()))
elif os.path.exists(self.db_filename) and not os.path.isfile(self.db_filename):
- raise IOError, '%s exists but is not a file. please remove it and try again' \
- % self.db_filename
+ raise IOError('%s exists but is not a file. please remove it and try again' \
+ % self.db_filename)
else:
self.write()
self.load()
@@ -49,8 +49,8 @@ class XmlStorage(SimpleStorage):
xml = XML(self.db_filename)
dict.__init__(self, xml.todict())
elif os.path.exists(self.db_filename) and not os.path.isfile(self.db_filename):
- raise IOError, '%s exists but is not a file. please remove it and try again' \
- % self.db_filename
+ raise IOError('%s exists but is not a file. please remove it and try again' \
+ % self.db_filename)
else:
self.write()
self.load()
diff --git a/sfa/util/xml.py b/sfa/util/xml.py
index f46443a..3a38ecc 100755
--- a/sfa/util/xml.py
+++ b/sfa/util/xml.py
@@ -1,10 +1,11 @@
#!/usr/bin/python
-from types import StringTypes
from lxml import etree
-from StringIO import StringIO
from sfa.util.faults import InvalidXML
from sfa.rspecs.elements.element import Element
+from sfa.util.py23 import StringType
+from sfa.util.py23 import StringIO
+
# helper functions to help build xpaths
class XpathFilter:
@staticmethod
@@ -163,7 +164,7 @@ class XML:
self.namespaces = namespaces
self.default_namespace = None
self.schema = None
- if isinstance(xml, basestring):
+ if isinstance(xml, StringType):
self.parse_xml(xml)
if isinstance(xml, XmlElement):
self.root = xml
@@ -226,7 +227,7 @@ class XML:
if isinstance(val, dict):
child_element = etree.SubElement(element, key)
self.parse_dict(val, key, child_element)
- elif isinstance(val, basestring):
+ elif isinstance(val, StringType):
child_element = etree.SubElement(element, key).text = val
elif isinstance(value, int):
@@ -239,7 +240,7 @@ class XML:
d=d.copy()
# looks like iteritems won't stand side-effects
for k in d.keys():
- if not isinstance(d[k],StringTypes):
+ if not isinstance(d[k], StringType):
del d[k]
element.attrib.update(d)
diff --git a/sfa/util/xrn.py b/sfa/util/xrn.py
index b16ea51..6198f83 100644
--- a/sfa/util/xrn.py
+++ b/sfa/util/xrn.py
@@ -162,7 +162,7 @@ class Xrn:
def get_hrn_type(self): return (self.hrn, self.type)
def _normalize(self):
- if self.hrn is None: raise SfaAPIError, "Xrn._normalize"
+ if self.hrn is None: raise SfaAPIError("Xrn._normalize")
if not hasattr(self,'leaf'):
self.leaf=Xrn.hrn_split(self.hrn)[-1]
# self.authority keeps a list
@@ -211,7 +211,7 @@ class Xrn:
# if not self.urn or not self.urn.startswith(Xrn.URN_PREFIX):
if not Xrn.is_urn(self.urn):
- raise SfaAPIError, "Xrn.urn_to_hrn"
+ raise SfaAPIError("Xrn.urn_to_hrn")
parts = Xrn.urn_split(self.urn)
type=parts.pop(2)
@@ -249,7 +249,7 @@ class Xrn:
# if not self.hrn or self.hrn.startswith(Xrn.URN_PREFIX):
if Xrn.is_urn(self.hrn):
- raise SfaAPIError, "Xrn.hrn_to_urn, hrn=%s"%self.hrn
+ raise SfaAPIError("Xrn.hrn_to_urn, hrn=%s"%self.hrn)
if self.type and self.type.startswith('authority'):
self.authority = Xrn.hrn_auth_list(self.hrn)
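
Several hunks (faults.py, method.py, storage.py, xrn.py, and moo.py below) convert the Python-2-only raise and except comma forms to the parenthesized forms accepted by both interpreters. A standalone sketch of the two rewrites, using names from the diff for illustration only:

    class SfaAPIError(Exception):
        pass

    def normalize(hrn):
        if hrn is None:
            raise SfaAPIError("Xrn._normalize")      # 'raise SfaAPIError, "..."' is Python-2-only
        try:
            return hrn.strip()
        except AttributeError as e:                  # 'except AttributeError, e' is Python-2-only
            raise SfaAPIError("unexpected hrn type: %s" % e)
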
diff --git a/sfatables/commands/moo.py b/sfatables/commands/moo.py
index bd8c88e..65b7598 100644
--- a/sfatables/commands/moo.py
+++ b/sfatables/commands/moo.py
@@ -1,5 +1,7 @@
import os, time
+from sfa.util.py23 import StringType
+
class Command:
commandline_options = []
help = "Add a new rule"
@@ -122,7 +124,7 @@ class Command:
try:
self.type_check(name, value, item, args)
return
- except SfaInvalidArgument, fault:
+ except SfaInvalidArgument as fault:
pass
raise fault
@@ -152,7 +154,7 @@ class Command:
# Strings are a special case. Accept either unicode or str
# types if a string is expected.
- if expected_type in StringTypes and isinstance(value, StringTypes):
+ if issubclass(expected_type, StringType) and isinstance(value, StringType):
pass
# Integers and long integers are also special types. Accept
@@ -167,23 +169,23 @@ class Command:
name)
# If a minimum or maximum (length, value) has been specified
- if expected_type in StringTypes:
+ if issubclass(expected_type, StringType):
if min is not None and \
len(value.encode(self.api.encoding)) < min:
- raise SfaInvalidArgument, "%s must be at least %d bytes long" % (name, min)
+ raise SfaInvalidArgument("%s must be at least %d bytes long" % (name, min))
if max is not None and \
len(value.encode(self.api.encoding)) > max:
- raise SfaInvalidArgument, "%s must be at most %d bytes long" % (name, max)
+ raise SfaInvalidArgument("%s must be at most %d bytes long" % (name, max))
elif expected_type in (list, tuple, set):
if min is not None and len(value) < min:
- raise SfaInvalidArgument, "%s must contain at least %d items" % (name, min)
+ raise SfaInvalidArgument("%s must contain at least %d items" % (name, min))
if max is not None and len(value) > max:
- raise SfaInvalidArgument, "%s must contain at most %d items" % (name, max)
+ raise SfaInvalidArgument("%s must contain at most %d items" % (name, max))
else:
if min is not None and value < min:
- raise SfaInvalidArgument, "%s must be > %s" % (name, str(min))
+ raise SfaInvalidArgument("%s must be > %s" % (name, str(min)))
if max is not None and value > max:
- raise SfaInvalidArgument, "%s must be < %s" % (name, str(max))
+ raise SfaInvalidArgument("%s must be < %s" % (name, str(max)))
# If a list with particular types of items is expected
if isinstance(expected, (list, tuple, set)):
diff --git a/tests/testCert.py b/tests/testCert.py
index 0a6a90c..b2e8133 100755
--- a/tests/testCert.py
+++ b/tests/testCert.py
@@ -1,6 +1,6 @@
import unittest
-import xmlrpclib
from sfa.trust.certificate import Certificate, Keypair
+from sfa.util.py23 import xmlrpc_client
class TestCert(unittest.TestCase):
def setUp(self):
@@ -42,7 +42,7 @@ class TestCert(unittest.TestCase):
# try something a bit more complicated, like an xmlrpc encoding of
# some parameters
cert = Certificate(subject="test")
- data = xmlrpclib.dumps((1, "foo", ["a", "b"], {"c": "d", "e": "f"}, True))
+ data = xmlrpc_client.dumps((1, "foo", ["a", "b"], {"c": "d", "e": "f"}, True))
cert.set_data(data)
self.assertEqual(cert.get_data(), data)
diff --git a/tests/testKeypair.py b/tests/testKeypair.py
index 424740a..3d8d40a 100755
--- a/tests/testKeypair.py
+++ b/tests/testKeypair.py
@@ -25,11 +25,11 @@ class TestKeypair(unittest.TestCase):
self.assertEqual(k.as_pem(), k2.as_pem())
- def test_get_m2_pkey(self):
+ def test_get_m2_pubkey(self):
k = Keypair()
k.create()
- m2 = k.get_m2_pkey()
+ m2 = k.get_m2_pubkey()
self.assert_(m2 != None)
def test_get_openssl_pkey(self):
diff --git a/tools/depgraph2dot.py b/tools/depgraph2dot.py
index b8ecbce..ab07a31 100755
--- a/tools/depgraph2dot.py
+++ b/tools/depgraph2dot.py
@@ -43,7 +43,7 @@ class pydepgraphdot:
# normalise our input data
for k,d in p.items():
for v in d.keys():
- if not p.has_key(v):
+ if v not in p:
p[v] = {}
f = self.get_output_file()
diff --git a/tools/py2depgraph.py b/tools/py2depgraph.py
index 022add3..ef3b6f8 100755
--- a/tools/py2depgraph.py
+++ b/tools/py2depgraph.py
@@ -23,7 +23,7 @@
import sys, pprint
import modulefinder
-focus=[ 'sfa' , 'OpenSSL', 'M2Crypto', 'xmlrpclib', 'threading' ]
+focus = [ 'sfa' , 'OpenSSL', 'M2Crypto', 'xmlrpclib', 'threading' ]
class mymf(modulefinder.ModuleFinder):
def __init__(self,*args,**kwargs):
diff --git a/tools/reset_gids.py b/tools/reset_gids.py
index 21e25ce..e30ed32 100755
--- a/tools/reset_gids.py
+++ b/tools/reset_gids.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
-import types
-
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
@@ -22,7 +20,7 @@ def fix_users():
pub_key=getattr(record,'reg_keys',None)
if len(pub_key) > 0:
# use only first key in record
- if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
+ if pub_key and isinstance(pub_key, list): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
diff --git a/wsdl/sfa2wsdl.py b/wsdl/sfa2wsdl.py
index a9e3c32..2eb9463 100755
--- a/wsdl/sfa2wsdl.py
+++ b/wsdl/sfa2wsdl.py
@@ -17,6 +17,8 @@ from optparse import OptionParser
from sfa.storage.parameter import Parameter, Mixed
+from sfa.util.py23 import StringType
+
plc_ns="http://www.planet-lab.org/sfa"
class SoapError(Exception):
@@ -162,11 +164,11 @@ class WSDLGen:
return "xsd:boolean"
elif arg_type == FloatType:
return "xsd:double"
- elif arg_type in StringTypes:
+ elif issubclass(arg_type, StringType):
return "xsd:string"
else:
pdb.set_trace()
- raise SoapError, "Cannot handle %s objects" % arg_type
+ raise SoapError("Cannot handle %s objects" % arg_type)
def param_type(self, arg):
return (self.name_complex_type(arg))
@@ -188,7 +190,7 @@ class WSDLGen:
in_el.setAttribute("name", method + "_in")
for service_name in function.interfaces:
- if (self.services.has_key(service_name)):
+ if (service_name in self.services):
if (not method in self.services[service_name]):
self.services[service_name].append(method)
else:
diff --git a/xmlbuilder-0.9/LICENSE b/xmlbuilder-0.9/LICENSE
deleted file mode 100644
index 0d0f57c..0000000
--- a/xmlbuilder-0.9/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License
-
-Copyright (c) 2008 Konstantin Danilov aka koder
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/xmlbuilder-0.9/MANIFEST.in b/xmlbuilder-0.9/MANIFEST.in
deleted file mode 100644
index 207725e..0000000
--- a/xmlbuilder-0.9/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-include xmlbuild/docs *.rst
-include . LICENSE
\ No newline at end of file
diff --git a/xmlbuilder-0.9/PKG-INFO b/xmlbuilder-0.9/PKG-INFO
deleted file mode 100644
index d931c15..0000000
--- a/xmlbuilder-0.9/PKG-INFO
+++ /dev/null
@@ -1,80 +0,0 @@
-Metadata-Version: 1.0
-Name: xmlbuilder
-Version: 0.9
-Summary: Pythonic way to create xml files
-Home-page: http://pypi.python.org/pypi/xmlbuilder
-Author: koder
-Author-email: koder_dot_mail@gmail_dot_com
-License: MIT
-Download-URL: http://pypi.python.org/pypi/xmlbuilder
-Description: Example of usage:
- -----------------
-
-
- from __future__ import with_statement
- from xmlbuilder import XMLBuilder
- x = XMLBuilder(format=True)
- with x.root(a = 1):
- with x.data:
- [x << ('node',{'val':i}) for i in range(10)]
-
- print str(x)
-
- will print
-
- <root a="1">
- <data>
- <node val="0" />
- <node val="1" />
- <node val="2" />
- <node val="3" />
- <node val="4" />
- <node val="5" />
- <node val="6" />
- <node val="7" />
- <node val="8" />
- <node val="9" />
- </data>
- </root>
-
- Mercurial repo:http://hg.assembla.com/MyPackages/
-
- Documentations
- --------------
- `XMLBuilder` is simple library build on top of `ElementTree.TreeBuilder` to
- simplify xml files creation as much as possible. Althow it can produce
- structured result with identated child tags. `XMLBuilder` use python `with`
- statement to define xml tag levels and `<<` operator for simple cases -
- text and tag without childs.
-
- First we need to create xmlbuilder
-
- from xmlbuilder import XMLBuilder
- # params - encoding = 'utf8',
- # builder = None, - ElementTree.TreeBuilder
- # tab_level = None, - current tab l;evel - for formatted output only
- # format = False, - create formatted output
- # tab_step = " " * 4 - indentation step
- xml = XMLBuilder()
-
-
- Use `with` statement to make document structure
- #create and open tag 'root_tag' with text 'text' and attributes
- with xml.root_tag(text,attr1=val1,attr2=val2):
- #create and open tag 'sub_tag'
- with xml.sub_tag(text,attr3=val3):
- #create tag which are not valid python identificator
- with xml('one-more-sub-tag',attr7=val37):
- xml << "Some textual data"
- #here tag 'one-more-sub-tag' are closed
- #Tags without children can be created using `<<` operator
- for val in range(15):
- xml << ('message',"python rocks!"[:i])
- #create 15 child tag like <message> python r</message>
- #all tags closed
- node = ~x # get etree.ElementTree object
- xml_data = str(x)
- unicode_xml_data = unicode(x)
-
-Keywords: xml
-Platform: UNKNOWN
diff --git a/xmlbuilder-0.9/README.txt b/xmlbuilder-0.9/README.txt
deleted file mode 100644
index 7a7131f..0000000
--- a/xmlbuilder-0.9/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-Pythonic way to build xml files
\ No newline at end of file
diff --git a/xmlbuilder-0.9/setup.cfg b/xmlbuilder-0.9/setup.cfg
deleted file mode 100644
index b14b0bc..0000000
--- a/xmlbuilder-0.9/setup.cfg
+++ /dev/null
@@ -1,5 +0,0 @@
-[egg_info]
-tag_build =
-tag_date = 0
-tag_svn_revision = 0
-
diff --git a/xmlbuilder-0.9/setup.py b/xmlbuilder-0.9/setup.py
deleted file mode 100644
index bac4b12..0000000
--- a/xmlbuilder-0.9/setup.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-#-------------------------------------------------------------------------------
-import os
-import sys
-import glob
-import os.path
-from setuptools import setup
-#from distutils.core import setup
-#-------------------------------------------------------------------------------
-if 'upload' in sys.argv:
- # for .pypirc file
- try:
- os.environ['HOME']
- except KeyError:
- os.environ['HOME'] = '..\\'
-#-------------------------------------------------------------------------------
-fpath = lambda x : os.path.join(*x.split('/'))
-#-------------------------------------------------------------------------------
-PYPI_URL = 'http://pypi.python.org/pypi/xmlbuilder'
-ld = open(fpath('xmlbuilder/docs/long_descr.rst')).read()
-ld = ld.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')
-setup(
- name = "xmlbuilder",
- fullname = "xmlbuilder",
- version = "0.9",
- packages = ["xmlbuilder"],
- package_dir = {'xmlbuilder':'xmlbuilder'},
- author = "koder",
- author_email = "koder_dot_mail@gmail_dot_com",
- maintainer = 'koder',
- maintainer_email = "koder_dot_mail@gmail_dot_com",
- description = "Pythonic way to create xml files",
- license = "MIT",
- keywords = "xml",
- test_suite = "xml_buider.tests",
- url = PYPI_URL,
- download_url = PYPI_URL,
- long_description = ld,
- #include_package_data = True,
- #package_data = {'xmlbuilder':["docs/*.rst"]},
- #data_files = [('', ['xmlbuilder/docs/long_descr.rst'])]
-)
-#-------------------------------------------------------------------------------
diff --git a/xmlbuilder-0.9/xmlbuilder.egg-info/PKG-INFO b/xmlbuilder-0.9/xmlbuilder.egg-info/PKG-INFO
deleted file mode 100644
index 569b8ab..0000000
--- a/xmlbuilder-0.9/xmlbuilder.egg-info/PKG-INFO
+++ /dev/null
@@ -1,80 +0,0 @@
-Metadata-Version: 1.1
-Name: xmlbuilder
-Version: 0.9
-Summary: Pythonic way to create xml files
-Home-page: http://pypi.python.org/pypi/xmlbuilder
-Author: koder
-Author-email: koder_dot_mail@gmail_dot_com
-License: MIT
-Download-URL: http://pypi.python.org/pypi/xmlbuilder
-Description: Example of usage:
- -----------------
-
-
- from __future__ import with_statement
- from xmlbuilder import XMLBuilder
- x = XMLBuilder(format=True)
- with x.root(a = 1):
- with x.data:
- [x << ('node',{'val':i}) for i in range(10)]
-
- print str(x)
-
- will print
-
- <root a="1">
- <data>
- <node val="0" />
- <node val="1" />
- <node val="2" />
- <node val="3" />
- <node val="4" />
- <node val="5" />
- <node val="6" />
- <node val="7" />
- <node val="8" />
- <node val="9" />
- </data>
- </root>
-
- Mercurial repo:http://hg.assembla.com/MyPackages/
-
- Documentations
- --------------
- `XMLBuilder` is simple library build on top of `ElementTree.TreeBuilder` to
- simplify xml files creation as much as possible. Althow it can produce
- structured result with identated child tags. `XMLBuilder` use python `with`
- statement to define xml tag levels and `<<` operator for simple cases -
- text and tag without childs.
-
- First we need to create xmlbuilder
-
- from xmlbuilder import XMLBuilder
- # params - encoding = 'utf8',
- # builder = None, - ElementTree.TreeBuilder
- # tab_level = None, - current tab l;evel - for formatted output only
- # format = False, - create formatted output
- # tab_step = " " * 4 - indentation step
- xml = XMLBuilder()
-
-
- Use `with` statement to make document structure
- #create and open tag 'root_tag' with text 'text' and attributes
- with xml.root_tag(text,attr1=val1,attr2=val2):
- #create and open tag 'sub_tag'
- with xml.sub_tag(text,attr3=val3):
- #create tag which are not valid python identificator
- with xml('one-more-sub-tag',attr7=val37):
- xml << "Some textual data"
- #here tag 'one-more-sub-tag' are closed
- #Tags without children can be created using `<<` operator
- for val in range(15):
- xml << ('message',"python rocks!"[:i])
- #create 15 child tag like <message> python r</message>
- #all tags closed
- node = ~x # get etree.ElementTree object
- xml_data = str(x)
- unicode_xml_data = unicode(x)
-
-Keywords: xml
-Platform: UNKNOWN
diff --git a/xmlbuilder-0.9/xmlbuilder.egg-info/SOURCES.txt b/xmlbuilder-0.9/xmlbuilder.egg-info/SOURCES.txt
deleted file mode 100644
index 4cc27de..0000000
--- a/xmlbuilder-0.9/xmlbuilder.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-LICENSE
-MANIFEST.in
-PKG-INFO
-README.txt
-setup.cfg
-setup.py
-xmlbuilder/__init__.py
-xmlbuilder.egg-info/PKG-INFO
-xmlbuilder.egg-info/SOURCES.txt
-xmlbuilder.egg-info/dependency_links.txt
-xmlbuilder.egg-info/top_level.txt
-xmlbuilder/docs/long_descr.rst
-xmlbuilder/tests/__init__.py
\ No newline at end of file
diff --git a/xmlbuilder-0.9/xmlbuilder.egg-info/dependency_links.txt b/xmlbuilder-0.9/xmlbuilder.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/xmlbuilder-0.9/xmlbuilder.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/xmlbuilder-0.9/xmlbuilder.egg-info/top_level.txt b/xmlbuilder-0.9/xmlbuilder.egg-info/top_level.txt
deleted file mode 100644
index 9f1d486..0000000
--- a/xmlbuilder-0.9/xmlbuilder.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-xmlbuilder
diff --git a/xmlbuilder-0.9/xmlbuilder/__init__.py b/xmlbuilder-0.9/xmlbuilder/__init__.py
deleted file mode 100644
index 24ce7a5..0000000
--- a/xmlbuilder-0.9/xmlbuilder/__init__.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python
-#-------------------------------------------------------------------------------
-from __future__ import with_statement
-#-------------------------------------------------------------------------------
-from xml.etree.ElementTree import TreeBuilder,tostring
-#-------------------------------------------------------------------------------
-__all__ = ["XMLBuilder"]
-__doc__ = """
-XMLBuilder is simple library build on top of ElementTree.TreeBuilder to
-simplify xml files creation as much as possible. Althow it can produce
-structured result with identated child tags. `XMLBuilder` use python `with`
-statement to define xml tag levels and `<<` operator for simple cases -
-text and tag without childs.
-
-from __future__ import with_statement
-from xmlbuilder import XMLBuilder
-x = XMLBuilder(format=True)
-with x.root(a = 1):
- with x.data:
- [x << ('node',{'val':i}) for i in range(10)]
-
-etree_node = ~x
-print str(x)
-"""
-#-------------------------------------------------------------------------------
-class _XMLNode(object):
- """Class for internal usage"""
- def __init__(self,parent,name,builder):
- self.builder = builder
- self.name = name
- self.text = []
- self.attrs = {}
- self.entered = False
- self.parent = parent
- def __call__(self,*dt,**mp):
- text = "".join(dt)
- if self.entered:
- self.builder.data(text)
- else:
- self.text.append(text)
- if self.entered:
- raise ValueError("Can't add attributes to already opened element")
- smp = dict((k,str(v)) for k,v in mp.items())
- self.attrs.update(smp)
- return self
- def __enter__(self):
- self.parent += 1
- self.builder.start(self.name,self.attrs)
- self.builder.data("".join(self.text))
- self.entered = True
- return self
- def __exit__(self,x,y,z):
- self.parent -= 1
- self.builder.end(self.name)
- return False
-#-------------------------------------------------------------------------------
-class XMLBuilder(object):
- """XmlBuilder(encoding = 'utf-8', # result xml file encoding
- builder = None, #etree.TreeBuilder or compatible class
- tab_level = None, #current tabulation level - string
- format = False, # make formatted output
- tab_step = " " * 4) # tabulation step
- use str(builder) or unicode(builder) to get xml text or
- ~builder to obtaine etree.ElementTree
- """
- def __init__(self,encoding = 'utf-8',
- builder = None,
- tab_level = None,
- format = False,
- tab_step = " " * 4):
- self.__builder = builder or TreeBuilder()
- self.__encoding = encoding
- if format :
- if tab_level is None:
- tab_level = ""
- if tab_level is not None:
- if not format:
- raise ValueError("format is False, but tab_level not None")
- self.__tab_level = tab_level # current format level
- self.__tab_step = tab_step # format step
- self.__has_sub_tag = False # True, if current tag had childrens
- self.__node = None
- # called from _XMLNode when tag opened
- def __iadd__(self,val):
- self.__has_sub_tag = False
- if self.__tab_level is not None:
- self.__builder.data("\n" + self.__tab_level)
- self.__tab_level += self.__tab_step
- return self
- # called from XMLNode when tag closed
- def __isub__(self,val):
- if self.__tab_level is not None:
- self.__tab_level = self.__tab_level[:-len(self.__tab_step)]
- if self.__has_sub_tag:
- self.__builder.data("\n" + self.__tab_level)
- self.__has_sub_tag = True
- return self
- def __getattr__(self,name):
- return _XMLNode(self,name,self.__builder)
- def __call__(self,name,*dt,**mp):
- x = _XMLNode(self,name,self.__builder)
- x(*dt,**mp)
- return x
- #create new tag or add text
- #possible shift values
- #string - text
- #tuple(string1,string2,dict) - new tag with name string1,attrs = dict,and text string2
- #dict and string2 are optional
- def __lshift__(self,val):
- if isinstance(val,basestring):
- self.__builder.data(val)
- else:
- self.__has_sub_tag = True
- assert hasattr(val,'__len__'),\
- 'Shifted value should be tuple or list like object not %r' % val
- assert hasattr(val,'__getitem__'),\
- 'Shifted value should be tuple or list like object not %r' % val
- name = val[0]
- if len(val) == 3:
- text = val[1]
- attrs = val[2]
- elif len(val) == 1:
- text = ""
- attrs = {}
- elif len(val) == 2:
- if isinstance(val[1],basestring):
- text = val[1]
- attrs = {}
- else:
- text = ""
- attrs = val[1]
- if self.__tab_level is not None:
- self.__builder.data("\n" + self.__tab_level)
- self.__builder.start(name,
- dict((k,str(v)) for k,v in attrs.items()))
- if text:
- self.__builder.data(text)
- self.__builder.end(name)
- return self # to allow xml << some1 << some2 << some3
- #close builder
- def __invert__(self):
- if self.__node is not None:
- return self.__node
- self.__node = self.__builder.close()
- return self.__node
- def __str__(self):
- """return generated xml"""
- return tostring(~self,self.__encoding)
- def __unicode__(self):
- """return generated xml"""
- res = tostring(~self,self.__encoding)
- return res.decode(self.__encoding)
-#-------------------------------------------------------------------------------
diff --git a/xmlbuilder-0.9/xmlbuilder/docs/long_descr.rst b/xmlbuilder-0.9/xmlbuilder/docs/long_descr.rst
deleted file mode 100644
index 4e82bc8..0000000
--- a/xmlbuilder-0.9/xmlbuilder/docs/long_descr.rst
+++ /dev/null
@@ -1,68 +0,0 @@
-Example of usage:
------------------
-
-
-from __future__ import with_statement
-from xmlbuilder import XMLBuilder
-x = XMLBuilder(format=True)
-with x.root(a = 1):
- with x.data:
- [x << ('node',{'val':i}) for i in range(10)]
-
-print str(x)
-
-will print
-
-<root a="1">
- <data>
- <node val="0" />
- <node val="1" />
- <node val="2" />
- <node val="3" />
- <node val="4" />
- <node val="5" />
- <node val="6" />
- <node val="7" />
- <node val="8" />
- <node val="9" />
- </data>
-</root>
-
-Mercurial repo:http://hg.assembla.com/MyPackages/
-
-Documentations
---------------
-`XMLBuilder` is simple library build on top of `ElementTree.TreeBuilder` to
-simplify xml files creation as much as possible. Althow it can produce
-structured result with identated child tags. `XMLBuilder` use python `with`
-statement to define xml tag levels and `<<` operator for simple cases -
-text and tag without childs.
-
-First we need to create xmlbuilder
-
- from xmlbuilder import XMLBuilder
- # params - encoding = 'utf8',
- # builder = None, - ElementTree.TreeBuilder
- # tab_level = None, - current tab l;evel - for formatted output only
- # format = False, - create formatted output
- # tab_step = " " * 4 - indentation step
- xml = XMLBuilder()
-
-
-Use `with` statement to make document structure
- #create and open tag 'root_tag' with text 'text' and attributes
- with xml.root_tag(text,attr1=val1,attr2=val2):
- #create and open tag 'sub_tag'
- with xml.sub_tag(text,attr3=val3):
- #create tag which are not valid python identificator
- with xml('one-more-sub-tag',attr7=val37):
- xml << "Some textual data"
- #here tag 'one-more-sub-tag' are closed
- #Tags without children can be created using `<<` operator
- for val in range(15):
- xml << ('message',"python rocks!"[:i])
- #create 15 child tag like <message> python r</message>
- #all tags closed
- node = ~x # get etree.ElementTree object
- xml_data = str(x)
- unicode_xml_data = unicode(x)
diff --git a/xmlbuilder-0.9/xmlbuilder/tests/__init__.py b/xmlbuilder-0.9/xmlbuilder/tests/__init__.py
deleted file mode 100644
index 43a67b1..0000000
--- a/xmlbuilder-0.9/xmlbuilder/tests/__init__.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python
-from __future__ import with_statement
-#-------------------------------------------------------------------------------
-import unittest
-from xml.etree.ElementTree import fromstring
-#-------------------------------------------------------------------------------
-from xmlbuilder import XMLBuilder
-#-------------------------------------------------------------------------------
-def xmlStructureEqual(xml1,xml2):
- tree1 = fromstring(xml1)
- tree2 = fromstring(xml2)
- return _xmlStructureEqual(tree1,tree2)
-#-------------------------------------------------------------------------------
-def _xmlStructureEqual(tree1,tree2):
- if tree1.tag != tree2.tag:
- return False
- attr1 = list(tree1.attrib.items())
- attr1.sort()
- attr2 = list(tree2.attrib.items())
- attr2.sort()
- if attr1 != attr2:
- return False
- return tree1.getchildren() == tree2.getchildren()
-#-------------------------------------------------------------------------------
-result1 = \
-"""
-<root>
- <array />
- <array len="10">
- <el val="0" />
- <el val="1">xyz</el>
- <el val="2">abc</el>
- <el val="3" />
- <el val="4" />
- <el val="5" />
- <sup-el val="23">test </sup-el>
- </array>
-</root>
-""".strip()
-#-------------------------------------------------------------------------------
-class TestXMLBuilder(unittest.TestCase):
- def testShift(self):
- xml = (XMLBuilder() << ('root',))
- self.assertEqual(str(xml),"<root />")
-
- xml = XMLBuilder()
- xml << ('root',"some text")
- self.assertEqual(str(xml),"<root>some text</root>")
-
- xml = XMLBuilder()
- xml << ('root',{'x':1,'y':'2'})
- self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>some text</root>"))
-
- xml = XMLBuilder()
- xml << ('root',{'x':1,'y':'2'})
- self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'></root>"))
-
- xml = XMLBuilder()
- xml << ('root',{'x':1,'y':'2'})
- self.assert_(not xmlStructureEqual(str(xml),"<root x='2' y='2'></root>"))
-
-
- xml = XMLBuilder()
- xml << ('root',"gonduras.ua",{'x':1,'y':'2'})
- self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>gonduras.ua</root>"))
-
- xml = XMLBuilder()
- xml << ('root',"gonduras.ua",{'x':1,'y':'2'})
- self.assert_(xmlStructureEqual(str(xml),"<root x='1' y='2'>gonduras.com</root>"))
- #---------------------------------------------------------------------------
- def testWith(self):
- xml = XMLBuilder()
- with xml.root(lenght = 12):
- pass
- self.assertEqual(str(xml),'<root lenght="12" />')
-
- xml = XMLBuilder()
- with xml.root():
- xml << "text1" << "text2" << ('some_node',)
- self.assertEqual(str(xml),"<root>text1text2<some_node /></root>")
- #---------------------------------------------------------------------------
- def testFormat(self):
- x = XMLBuilder('utf-8',format = True)
- with x.root():
- x << ('array',)
- with x.array(len = 10):
- with x.el(val = 0):
- pass
- with x.el('xyz',val = 1):
- pass
- x << ("el","abc",{'val':2}) << ('el',dict(val=3))
- x << ('el',dict(val=4)) << ('el',dict(val='5'))
- with x('sup-el',val = 23):
- x << "test "
- self.assertEqual(str(x),result1)
-#-------------------------------------------------------------------------------
-if __name__ == '__main__':
- unittest.main()
-#-------------------------------------------------------------------------------