diff --git a/src/commands/cmdhandler.py b/src/commands/cmdhandler.py index adf2e9a960..0a3e7bbde7 100644 --- a/src/commands/cmdhandler.py +++ b/src/commands/cmdhandler.py @@ -316,6 +316,7 @@ def cmdhandler(called_on, raw_string, testing=False, callertype="session", sessi else: # fallback to default error text sysarg = _("Command '%s' is not available.") % raw_string + cmdset.get_all_cmd_keys_and_aliases(caller) suggestions = string_suggestions(raw_string, cmdset.get_all_cmd_keys_and_aliases(caller), cutoff=0.7, maxnum=3) if suggestions: sysarg += _(" Maybe you meant %s?") % utils.list_to_string(suggestions, _('or'), addquote=True) @@ -332,7 +333,7 @@ def cmdhandler(called_on, raw_string, testing=False, callertype="session", sessi if syscmd: # replace system command with custom version cmd = syscmd - cmd.sessid = caller.sessid if callertype=="session" else None + cmd.sessid = session.sessid if session else None sysarg = "%s:%s" % (cmdname, args) raise ExecSystemCommand(cmd, sysarg) @@ -398,7 +399,7 @@ def cmdhandler(called_on, raw_string, testing=False, callertype="session", sessi syscmd.cmdstring = syscmd.key syscmd.args = sysarg syscmd.cmdset = cmdset - syscmd.sessid = caller.sessid if callertype=="session" else None + syscmd.sessid = session.sessid if session else None syscmd.raw_string = unformatted_raw_string if hasattr(syscmd, 'obj') and hasattr(syscmd.obj, 'scripts'): diff --git a/src/commands/default/admin.py b/src/commands/default/admin.py index 5598776d48..51d4afbdbc 100644 --- a/src/commands/default/admin.py +++ b/src/commands/default/admin.py @@ -527,7 +527,7 @@ class CmdPerm(MuxCommand): tstring = "" if 'del' in switches: # delete the given permission(s) from object. - obj.permission.remove(self.rhslist) + obj.permissions.remove(self.rhslist) cstring += "\nPermission(s) %s removed from %s (if they existed)." % (", ".join(self.rhslist), obj.name) tstring += "\n%s revokes the permission(s) %s from you." 
% (caller.name, ", ".join(self.rhslist)) else: diff --git a/src/comms/migrations/0014_transfer_channels.py b/src/comms/migrations/0014_transfer_channels.py index 4cfff48010..984fb6f6df 100644 --- a/src/comms/migrations/0014_transfer_channels.py +++ b/src/comms/migrations/0014_transfer_channels.py @@ -8,7 +8,7 @@ class Migration(DataMigration): def forwards(self, orm): "Write your forwards methods here." - # Note: Don't use "from appname.models import ModelName". + # Note: Don't use "from appname.models import ModelName". # Use orm.ModelName to refer to models in this application, # and orm['appname.ModelName'] for models in other applications. ChannelDB = orm['comms.ChannelDB'] @@ -28,8 +28,12 @@ class Migration(DataMigration): new_channel.db_attributes.add(keep_log) for name in [alias.strip() for alias in channel.db_aliases.split(',')]: - tag = Tag(db_key=name, db_category='comm_alias') - tag.save() + tag = Tag.objects.filter(db_key=name.lower().strip(), db_category='comm_alias') + if tag: + tag = tag[0] + else: + tag = Tag(db_key=name.lower().strip(), db_category='comm_alias') + tag.save() new_channel.db_tags.add(tag) new_channel.save() diff --git a/src/comms/models.py b/src/comms/models.py index 4e449bae8f..0ef8f5776c 100644 --- a/src/comms/models.py +++ b/src/comms/models.py @@ -342,10 +342,6 @@ class ChannelDB(TypedObject): _typeclass_paths = settings.COMM_TYPECLASS_PATHS _default_typeclass_path = settings.BASE_COMM_TYPECLASS or "src.comms.comms.Comm" - class Meta: - "Define Django meta options" - verbose_name = "Channel" - def __init__(self, *args, **kwargs): TypedObject.__init__(self, *args, **kwargs) _SA(self, "tags", TagHandler(self, category_prefix="comm_")) diff --git a/src/objects/migrations/0024_move_permissions_to_handler.py b/src/objects/migrations/0024_move_permissions_to_handler.py index 957b1714fd..e20eeb0f35 100644 --- a/src/objects/migrations/0024_move_permissions_to_handler.py +++ b/src/objects/migrations/0024_move_permissions_to_handler.py @@ 
-11,12 +11,16 @@ class Migration(DataMigration): # Note: Don't use "from appname.models import ModelName". # Use orm.ModelName to refer to models in this application, # and orm['appname.ModelName'] for models in other applications. - + Tag = orm['typeclasses.Tag'] for obj in orm.ObjectDB.objects.all(): if obj.db_permissions: for perm in [perm.strip() for perm in obj.db_permissions.split(",")]: - tag = orm['typeclasses.Tag'].create(db_key=perm, db_category="permissions") - tag.save() + tag = Tag.objects.filter(db_key=perm.lower().strip(), db_category="permissions") + if tag: + tag = tag[0] + else: + tag = Tag(db_key=perm.lower().strip(), db_category="permissions") + tag.save() obj.db_tags.add(tag) def backwards(self, orm): diff --git a/src/players/migrations/0027_move_permissions_to_handler.py b/src/players/migrations/0027_move_permissions_to_handler.py index dc779b8940..074244e711 100644 --- a/src/players/migrations/0027_move_permissions_to_handler.py +++ b/src/players/migrations/0027_move_permissions_to_handler.py @@ -11,11 +11,16 @@ class Migration(DataMigration): # Note: Don't use "from appname.models import ModelName". # Use orm.ModelName to refer to models in this application, # and orm['appname.ModelName'] for models in other applications. 
+ Tag = orm['typeclasses.Tag'] for obj in orm.PlayerDB.objects.all(): if obj.db_permissions: for perm in [perm.strip() for perm in obj.db_permissions.split(",")]: - tag = orm['typeclasses.Tag'].create(db_key=perm, db_category="permissions") - tag.save() + tag = Tag.objects.filter(db_key=perm.lower().strip(), db_category="permissions") + if tag: + tag = tag[0] + else: + tag = Tag(db_key=perm.lower().strip(), db_category="permissions") + tag.save() obj.db_tags.add(tag) def backwards(self, orm): diff --git a/src/scripts/migrations/0015_move_permissions_to_handler.py b/src/scripts/migrations/0015_move_permissions_to_handler.py index be23123f0a..b3a764e0f1 100644 --- a/src/scripts/migrations/0015_move_permissions_to_handler.py +++ b/src/scripts/migrations/0015_move_permissions_to_handler.py @@ -11,11 +11,16 @@ class Migration(DataMigration): # Note: Don't use "from appname.models import ModelName". # Use orm.ModelName to refer to models in this application, # and orm['appname.ModelName'] for models in other applications. + Tag = orm['typeclasses.Tag'] for obj in orm.ScriptDB.objects.all(): if obj.db_permissions: for perm in [perm.strip() for perm in obj.db_permissions.split(",")]: - tag = orm['typeclasses.Tag'].create(db_key=perm, db_category="permissions") - tag.save() + tag = Tag.objects.filter(db_key=perm.lower().strip(), db_category="permissions") + if tag: + tag = tag[0] + else: + tag = Tag(db_key=perm.lower().strip(), db_category="permissions") + tag.save() obj.db_tags.add(tag) def backwards(self, orm): diff --git a/src/server/amp.py b/src/server/amp.py index 528aa0bab8..f6ec7b86e3 100644 --- a/src/server/amp.py +++ b/src/server/amp.py @@ -240,7 +240,7 @@ class AMPProtocol(amp.AMP): def errback(self, e, info): "error handler, to avoid dropping connections on server tracebacks." 
- e.trap(Exception) + f = e.trap(Exception) print "AMP Error for %(info)s: %(e)s" % {'info': info, 'e': e.getErrorMessage()} def send_split_msg(self, sessid, msg, data, command): @@ -286,7 +286,7 @@ class AMPProtocol(amp.AMP): data comes in multiple chunks; if so (nparts>1) we buffer the data and wait for the remaining parts to arrive before continuing. """ - #print "msg portal -> server (server side):", sessid, msg + #print "msg portal -> server (server side):", sessid, msg, data global MSGBUFFER if nparts > 1: # a multipart message @@ -311,7 +311,7 @@ class AMPProtocol(amp.AMP): try: return self.callRemote(MsgPortal2Server, sessid=sessid, - msg=msg, + msg=to_str(msg) if msg!=None else "", ipart=0, nparts=1, data=dumps(data)).addErrback(self.errback, "MsgPortal2Server") @@ -351,7 +351,7 @@ class AMPProtocol(amp.AMP): try: return self.callRemote(MsgServer2Portal, sessid=sessid, - msg=to_str(msg), + msg=to_str(msg) if msg!=None else "", ipart=0, nparts=1, data=dumps(data)).addErrback(self.errback, "MsgServer2Portal") diff --git a/src/server/oobhandler.py b/src/server/oobhandler.py index 14d4040fd3..2bdaaad234 100644 --- a/src/server/oobhandler.py +++ b/src/server/oobhandler.py @@ -22,23 +22,23 @@ oob trackers should inherit from the OOBTracker class in this """ +from inspect import isfunction from django.conf import settings from src.server.models import ServerConfig from src.server.sessionhandler import SESSIONS from src.scripts.scripts import Script -from src.create import create_script -from src.utils.dbserialize import dbserialize, dbunserialize, pack_dbobj +from src.utils.create import create_script +from src.utils.dbserialize import dbserialize, dbunserialize, pack_dbobj, unpack_dbobj from src.utils import logger -from src.utils.utils import variable_from_module, to_str +from src.utils.utils import all_from_module, to_str, is_iter, make_iter _SA = object.__setattr__ _GA = object.__getattribute__ -_DA = object.__delattribute__ +_DA = object.__delattr__ -# 
trackers track property changes and keep returning until they are removed -_OOB_TRACKERS = variable_from_module(settings.OBB_PLUGIN_MODULE, "OBB_TRACKERS", default={}) -# functions return immediately -_OOB_FUNCS = variable_from_module(settings.OBB_PLUGIN_MODULE, "OBB_FUNCS", default={}) +# load from plugin module +_OOB_FUNCS = dict((key.lower(), func) for key, func in all_from_module(settings.OOB_PLUGIN_MODULE).items() if isfunction(func)) +_OOB_ERROR = _OOB_FUNCS.get("oob_error", None) class TrackerHandler(object): @@ -52,30 +52,34 @@ class TrackerHandler(object): """ This is initiated and stored on the object as a property _trackerhandler. """ - self.obj = obj.dbobj + try: obj = obj.dbobj + except AttributeError: pass + self.obj = obj self.ntrackers = 0 # initiate store only with valid on-object fieldnames self.tracktargets = dict((key, {}) for key in _GA(_GA(self.obj, "_meta"), "get_all_field_names")()) - def add(self, fieldname, trackerkey, trackerobj): + def add(self, fieldname, tracker): """ Add tracker to the handler. Raises KeyError if fieldname does not exist. """ - self.tracktargets[fieldname][trackerkey] = trackerobj + trackerkey = tracker.__class__.__name__ + self.tracktargets[fieldname][trackerkey] = tracker self.ntrackers += 1 - def remove(self, fieldname, trackerkey, *args, **kwargs): + def remove(self, fieldname, trackerclass, *args, **kwargs): """ Remove tracker from handler. Raises KeyError if tracker is not found. 
""" - oobobj = self.tracktargets[fieldname][trackerkey] + trackerkey = trackerclass.__name__ + tracker = self.tracktargets[fieldname][trackerkey] try: - oobobj.at_delete(*args, **kwargs) + tracker.at_delete(*args, **kwargs) except Exception: logger.log_trace() - del oobobj + del tracker self.ntrackers -= 1 if self.ntrackers <= 0: # if there are no more trackers, clean this handler @@ -85,9 +89,9 @@ class TrackerHandler(object): """ Called by the field when it updates to a new value """ - for trackerobj in self.tracktargets[fieldname].values(): + for tracker in self.tracktargets[fieldname].values(): try: - trackerobj.update(fieldname, new_value) + tracker.update(new_value) except Exception: logger.log_trace() @@ -104,84 +108,61 @@ class TrackerBase(object): "Called when tracker is removed" pass -# Default tracker OOB class - -class OOBTracker(TrackerBase): +class _RepeaterScript(Script): """ - A OOB object that passively sends data to a stored sessid whenever - a named database field changes. + Repeating and subscription-enabled script for triggering OOB + functions. Maintained in a _RepeaterPool. 
""" - def __init__(self, fieldname, sessid, *args, **kwargs): - """ - name - name of entity to track, such as "db_key" - track_type - one of "field", "prop" or "attr" for Database fields, - non-database Property or Attribute - sessid - sessid of session to report to - """ - self.fieldname = fieldname - self.sessid = sessid + def at_script_creation(self): + "Called when script is initialized" + self.key = "oob_func" + self.desc = "OOB functionality script" + self.persistent = False #oob scripts should always be non-persistent + self.ndb.subscriptions = {} - def update(self, new_value, *args, **kwargs): - "Called by cache when updating the tracked entitiy" - SESSIONS.session_from_sessid(self.sessid).msg(oob={"cmdkey":"trackreturn", - "name":self.fieldname, - "value":new_value}) + def at_repeat(self): + """ + Calls subscriptions every self.interval seconds + """ + for (func_key, sessid, interval, args, kwargs) in self.ndb.subscriptions.values(): + session = SESSIONS.session_from_sessid(sessid) + OOB_HANDLER.execute_cmd(session, func_key, *args, **kwargs) + def subscribe(self, store_key, sessid, func_key, interval, *args, **kwargs): + """ + Sign up a subscriber to this oobfunction. Subscriber is + a database object with a dbref. + """ + self.ndb.subscriptions[store_key] = (func_key, sessid, interval, args, kwargs) + + def unsubscribe(self, store_key): + """ + Unsubscribe from oobfunction. Returns True if removal was + successful, False otherwise + """ + self.ndb.subscriptions.pop(store_key, None) class _RepeaterPool(object): """ This maintains a pool of _RepeaterScript scripts, ordered one per interval. It will automatically cull itself once a given interval's script has no more subscriptions. + + This is used and accessed from oobhandler.repeat/unrepeat """ - class _RepeaterScript(Script): - """ - Repeating script for triggering OOB functions. Maintained in the pool. 
- """ - def at_script_creation(self): - "Called when script is initialized" - self.key = "oob_func" - self.desc = "OOB functionality script" - self.persistent = False #oob scripts should always be non-persistent - self.ndb.subscriptions = {} - - def at_repeat(self): - """ - Calls subscriptions every self.interval seconds - """ - for (func_key, caller, interval, args, kwargs) in self.ndb.subscriptions.values(): - try: - _OOB_FUNCS[func_key](caller, *args, **kwargs) - except Exception: - logger.log_trace() - - def subscribe(self, store_key, caller, func_key, interval, *args, **kwargs): - """ - Sign up a subscriber to this oobfunction. Subscriber is - a database object with a dbref. - """ - self.ndb.subscriptions[store_key] = (func_key, caller, interval, args, kwargs) - - def unsubscribe(self, store_key): - """ - Unsubscribe from oobfunction. Returns True if removal was - successful, False otherwise - """ - self.ndb.subscriptions.pop(store_key, None) - def __init__(self): self.scripts = {} - def add(self, store_key, caller, func_key, interval, *args, **kwargs): + def add(self, store_key, sessid, func_key, interval, *args, **kwargs): """ Add a new tracking """ if interval not in self.scripts: # if no existing interval exists, create new script to fill the gap - new_tracker = create_script(self._RepeaterScript, key="oob_repeater_%is" % interval, interval=interval) + new_tracker = create_script(_RepeaterScript, key="oob_repeater_%is" % interval, interval=interval) self.scripts[interval] = new_tracker - self.scripts[interval].subscribe(store_key, caller, func_key, interval, *args, **kwargs) + self.scripts[interval].subscribe(store_key, sessid, func_key, interval, *args, **kwargs) def remove(self, store_key, interval): """ @@ -193,14 +174,14 @@ class _RepeaterPool(object): # no more subscriptions for this interval. Clean out the script. self.scripts[interval].stop() + def stop(self): + """ + Stop all scripts in pool. 
This is done at server reload since restoring the pool + will automatically re-populate the pool. + """ + for script in self.scripts.values(): + script.stop() -# Default OOB funcs - -def OOB_get_attr_val(caller, attrname): - "Get the given attrback from caller" - caller.msg(oob={"cmdkey":"get_attr", - "name":attrname, - "value":to_str(caller.attributes.get(attrname))}) # Main OOB Handler @@ -214,6 +195,7 @@ class OOBHandler(object): """ Initialize handler """ + self.sessionhandler = SESSIONS self.oob_tracker_storage = {} self.oob_repeat_storage = {} self.oob_tracker_pool = _RepeaterPool() @@ -224,9 +206,12 @@ class OOBHandler(object): ServerConf field """ if self.oob_tracker_storage: + #print "saved tracker_storage:", self.oob_tracker_storage ServerConfig.objects.conf(key="oob_tracker_storage", value=dbserialize(self.oob_tracker_storage)) if self.oob_repeat_storage: + #print "saved repeat_storage:", self.oob_repeat_storage ServerConfig.objects.conf(key="oob_repeat_storage", value=dbserialize(self.oob_repeat_storage)) + self.oob_tracker_pool.stop() def restore(self): """ @@ -237,88 +222,102 @@ class OOBHandler(object): tracker_storage = ServerConfig.objects.conf(key="oob_tracker_storage") if tracker_storage: self.oob_tracker_storage = dbunserialize(tracker_storage) - for tracker_key, (obj, sessid, fieldname, args, kwargs) in self.oob_tracker_storage.items(): - self.track(obj, sessid, fieldname, tracker_key, *args, **kwargs) + #print "recovered from tracker_storage:", self.oob_tracker_storage + for (obj, sessid, fieldname, trackerclass, args, kwargs) in self.oob_tracker_storage.values(): + self.track(unpack_dbobj(obj), sessid, fieldname, trackerclass, *args, **kwargs) + # make sure to purce the storage + ServerConfig.objects.conf(key="oob_tracker_storage", delete=True) repeat_storage = ServerConfig.objects.conf(key="oob_repeat_storage") if repeat_storage: self.oob_repeat_storage = dbunserialize(repeat_storage) - for func_key, (caller, func_key, interval, args, 
kwargs) in self.oob_repeat_storage.items(): - self.repeat(caller, func_key, interval, *args, **kwargs) + #print "recovered from repeat_storage:", self.oob_repeat_storage + for (obj, sessid, func_key, interval, args, kwargs) in self.oob_repeat_storage.values(): + self.repeat(unpack_dbobj(obj), sessid, func_key, interval, *args, **kwargs) + # make sure to purge the storage + ServerConfig.objects.conf(key="oob_repeat_storage", delete=True) - - def track(self, obj, sessid, fieldname, tracker_key, *args, **kwargs): + def track(self, obj, sessid, fieldname, trackerclass, *args, **kwargs): """ Create an OOB obj of class _oob_MAPPING[tracker_key] on obj. args, kwargs will be used to initialize the OOB hook before adding it to obj. If property_key is not given, but the OOB has a class property property_name, this will be used as the property name when assigning the OOB to - obj, otherwise tracker_key is ysed as the property name. + obj, otherwise tracker_key is used as the property name. """ - oobclass = _OOB_TRACKERS[tracker_key] # raise traceback if not found + try: obj = obj.dbobj + except AttributeError: pass + if not "_trackerhandler" in _GA(obj, "__dict__"): # assign trackerhandler to object _SA(obj, "_trackerhandler", TrackerHandler(obj)) # initialize object - oob = oobclass(obj, sessid, fieldname, *args, **kwargs) - _GA(obj, "_trackerhandler").add(oob, fieldname) - + tracker = trackerclass(self, fieldname, sessid, *args, **kwargs) + _GA(obj, "_trackerhandler").add(fieldname, tracker) # store calling arguments as a pickle for retrieval later - storekey = (pack_dbobj(obj), sessid, fieldname) - stored = (obj, sessid, fieldname, args, kwargs) + obj_packed = pack_dbobj(obj) + storekey = (obj_packed, sessid, fieldname) + stored = (obj_packed, sessid, fieldname, trackerclass, args, kwargs) self.oob_tracker_storage[storekey] = stored - def untrack(self, obj, sessid, fieldname, tracker_key, *args, **kwargs): + def untrack(self, obj, sessid, fieldname, trackerclass, *args, 
**kwargs): """ Remove the OOB from obj. If oob implements an at_delete hook, this will be called with args, kwargs """ + try: obj = obj.dbobj + except AttributeError: pass + try: # call at_delete hook - _GA(obj, "_trackerhandler").remove(fieldname, tracker_key, *args, **kwargs) + _GA(obj, "_trackerhandler").remove(fieldname, trackerclass, *args, **kwargs) except AttributeError: pass # remove the pickle from storage store_key = (pack_dbobj(obj), sessid, fieldname) self.oob_tracker_storage.pop(store_key, None) - def track_field(self, obj, sessid, field_name, tracker_key="oobtracker"): + def track_field(self, obj, sessid, field_name, trackerclass): """ Shortcut wrapper method for specifically tracking a database field. - Uses OOBTracker by default (change tracker_key to redirect) - Will create a tracker with a property name that the field cache - expects + Takes the tracker class as argument. """ # all database field names starts with db_* field_name = field_name if field_name.startswith("db_") else "db_%s" % field_name - oob_tracker_name = "_track_%s_change" % field_name # field cache looks for name on this form - self.track(obj, tracker_key, field_name, sessid, property_name=oob_tracker_name) + self.track(obj, sessid, field_name, trackerclass) - def track_attribute(self, obj, sessid, attr_name, tracker_key="oobtracker"): + def untrack_field(self, obj, sessid, field_name): + """ + Shortcut for untracking a database field. Uses OOBTracker by defualt + """ + field_name = field_name if field_name.startswith("db_") else "db_%s" % field_name + self.untrack(obj, sessid, field_name) + + def track_attribute(self, obj, sessid, attr_name, trackerclass): """ Shortcut wrapper method for specifically tracking the changes of an Attribute on an object. Will create a tracker on the Attribute Object and name in a way the Attribute expects. 
""" # get the attribute object if we can + try: obj = obj.dbobj + except AttributeError: pass attrobj = _GA(obj, "attributes").get(attr_name, return_obj=True) if attrobj: - oob_tracker_name = "_track_db_value_change" - self.track(attrobj, tracker_key, attr_name, sessid, property_name=oob_tracker_name) + self.track(attrobj, sessid, "db_value", trackerclass, attr_name) - def run(self, func_key, *args, **kwargs): + def untrack_attribute(self, obj, sessid, attr_name, trackerclass): """ - Retrieve oobfunc from OOB_FUNCS and execute it immediately - using *args and **kwargs + Shortcut for deactivating tracking for a given attribute. """ - oobfunc = _OOB_FUNCS[func_key] # raise traceback if not found - try: - oobfunc(*args, **kwargs) - except Exception: - logger.log_trace() + try: obj = obj.dbobj + except AttributeError: pass + attrobj = _GA(obj, "attributes").get(attr_name, return_obj=True) + if attrobj: + self.untrack(attrobj, sessid, attr_name, trackerclass) - def repeat(self, caller, func_key, interval=20, *args, **kwargs): + def repeat(self, obj, sessid, func_key, interval=20, *args, **kwargs): """ Start a repeating action. 
Every interval seconds, the oobfunc corresponding to func_key is called with @@ -326,20 +325,59 @@ class OOBHandler(object): """ if not func_key in _OOB_FUNCS: raise KeyError("%s is not a valid OOB function name.") - store_key = (pack_dbobj(caller), func_key, interval) + try: + obj = obj.dbobj + except AttributeError: + pass + store_obj = pack_dbobj(obj) + store_key = (store_obj, sessid, func_key, interval) # prepare to store - self.oob_repeat_storage[store_key] = (caller, func_key, interval, args, kwargs) - self.oob_tracker_pool.add(store_key, caller, func_key, interval, *args, **kwargs) + self.oob_repeat_storage[store_key] = (store_obj, sessid, func_key, interval, args, kwargs) + self.oob_tracker_pool.add(store_key, sessid, func_key, interval, *args, **kwargs) - def unrepeat(self, caller, func_key, interval=20): + def unrepeat(self, obj, sessid, func_key, interval=20): """ Stop a repeating action """ - store_key = (pack_dbobj(caller), func_key, interval) + try: + obj = obj.dbobj + except AttributeError: + pass + store_key = (pack_dbobj(obj), sessid, func_key, interval) self.oob_tracker_pool.remove(store_key, interval) self.oob_repeat_storage.pop(store_key, None) + def msg(self, sessid, funcname, *args, **kwargs): + "Shortcut to relay oob data back to portal" + session = self.sessionhandler.session_from_sessid(sessid) + #print "oobhandler msg:", sessid, session, funcname, args, kwargs + if session: + session.msg(oob=(funcname, args, kwargs)) + # access method - called from msg() + def execute_cmd(self, session, func_key, *args, **kwargs): + """ + Retrieve oobfunc from OOB_FUNCS and execute it immediately + using *args and **kwargs + """ + try: + #print "OOB execute_cmd:", session, func_key, args, kwargs, _OOB_FUNCS.keys() + oobfunc = _OOB_FUNCS[func_key] # raise traceback if not found + oobfunc(self, session, *args, **kwargs) + except KeyError,e: + errmsg = "OOB Error: function '%s' not recognized: %s" % (func_key, e) + if _OOB_ERROR: + _OOB_ERROR(self, session, 
errmsg, *args, **kwargs) + else: + logger.log_trace(errmsg) + raise + except Exception, err: + errmsg = "OOB Error: Exception in '%s'(%s, %s):\n%s" % (func_key, args, kwargs, err) + if _OOB_ERROR: + _OOB_ERROR(self, session, errmsg, *args, **kwargs) + else: + logger.log_trace(errmsg) + raise # access object OOB_HANDLER = OOBHandler() diff --git a/src/server/portal/msdp.py b/src/server/portal/msdp.py index 86e2847dfc..08b73da480 100644 --- a/src/server/portal/msdp.py +++ b/src/server/portal/msdp.py @@ -7,12 +7,10 @@ http://tintin.sourceforge.net/msdp/. MSDP manages out-of-band communication between the client and server, for updating health bars etc. -!TODO - this is just a partial implementation and not used by telnet yet. - """ import re from django.conf import settings -from src.utils.utils import make_iter, mod_import +from src.utils.utils import make_iter, mod_import, to_str from src.utils import logger # MSDP-relevant telnet cmd/opt-codes @@ -28,94 +26,13 @@ IAC = chr(255) SB = chr(250) SE = chr(240) +force_str = lambda inp: to_str(inp, force_string=True) + # pre-compiled regexes regex_array = re.compile(r"%s(.*?)%s%s(.*?)%s" % (MSDP_VAR, MSDP_VAL, MSDP_ARRAY_OPEN, MSDP_ARRAY_CLOSE)) # return 2-tuple regex_table = re.compile(r"%s(.*?)%s%s(.*?)%s" % (MSDP_VAR, MSDP_VAL, MSDP_TABLE_OPEN, MSDP_TABLE_CLOSE)) # return 2-tuple (may be nested) -regex_varval = re.compile(r"%s(.*?)%s(.*)" % (MSDP_VAR, MSDP_VAL)) # return 2-tuple - -# MSDP default definition commands supported by Evennia (can be supplemented with custom commands as well) -MSDP_COMMANDS = ("LIST", "REPORT", "RESET", "SEND", "UNREPORT") - -# fallbacks if no custom OOB module is available -MSDP_COMMANDS_CUSTOM = {} -# MSDP_REPORTABLE is a standard suggestions for making it easy to create generic guis. -# this maps MSDP command names to Evennia commands found in OOB_FUNC_MODULE. It -# is up to these commands to return data on proper form. 
This is overloaded if -# OOB_REPORTABLE is defined in the custom OOB module below. -MSDP_REPORTABLE = { - # General - "CHARACTER_NAME": "get_character_name", - "SERVER_ID": "get_server_id", - "SERVER_TIME": "get_server_time", - # Character - "AFFECTS": "char_affects", - "ALIGNMENT": "char_alignment", - "EXPERIENCE": "char_experience", - "EXPERIENCE_MAX": "char_experience_max", - "EXPERIENCE_TNL": "char_experience_tnl", - "HEALTH": "char_health", - "HEALTH_MAX": "char_health_max", - "LEVEL": "char_level", - "RACE": "char_race", - "CLASS": "char_class", - "MANA": "char_mana", - "MANA_MAX": "char_mana_max", - "WIMPY": "char_wimpy", - "PRACTICE": "char_practice", - "MONEY": "char_money", - "MOVEMENT": "char_movement", - "MOVEMENT_MAX": "char_movement_max", - "HITROLL": "char_hitroll", - "DAMROLL": "char_damroll", - "AC": "char_ac", - "STR": "char_str", - "INT": "char_int", - "WIS": "char_wis", - "DEX": "char_dex", - "CON": "char_con", - # Combat - "OPPONENT_HEALTH": "opponent_health", - "OPPONENT_HEALTH_MAX":"opponent_health_max", - "OPPONENT_LEVEL": "opponent_level", - "OPPONENT_NAME": "opponent_name", - # World - "AREA_NAME": "area_name", - "ROOM_EXITS": "area_room_exits", - "ROOM_NAME": "room_name", - "ROOM_VNUM": "room_dbref", - "WORLD_TIME": "world_time", - # Configurable variables - "CLIENT_ID": "client_id", - "CLIENT_VERSION": "client_version", - "PLUGIN_ID": "plugin_id", - "ANSI_COLORS": "ansi_colours", - "XTERM_256_COLORS": "xterm_256_colors", - "UTF_8": "utf_8", - "SOUND": "sound", - "MXP": "mxp", - # GUI variables - "BUTTON_1": "button1", - "BUTTON_2": "button2", - "BUTTON_3": "button3", - "BUTTON_4": "button4", - "BUTTON_5": "button5", - "GAUGE_1": "gauge1", - "GAUGE_2": "gauge2", - "GAUGE_3": "gauge3", - "GAUGE_4": "gauge4", - "GAUGE_5": "gauge5"} -MSDP_SENDABLE = MSDP_REPORTABLE - -# try to load custom OOB module -OOB_MODULE = None#mod_import(settings.OOB_FUNC_MODULE) -if OOB_MODULE: - # loading customizations from OOB_FUNC_MODULE if available - try: 
MSDP_REPORTABLE = OOB_MODULE.OOB_REPORTABLE # replaces the default MSDP definitions - except AttributeError: pass - try: MSDP_SENDABLE = OOB_MODULE.OOB_SENDABLE - except AttributeError: MSDP_SENDABLE = MSDP_REPORTABLE - try: MSDP_COMMANDS_CUSTOM = OOB_MODULE.OOB_COMMANDS - except: pass +regex_var = re.compile(MSDP_VAR) +regex_val = re.compile(MSDP_VAL) # Msdp object handler @@ -132,69 +49,97 @@ class Msdp(object): """ self.protocol = protocol self.protocol.protocol_flags['MSDP'] = False - self.protocol.negotiationMap[MSDP] = self.msdp_to_func + self.protocol.negotiationMap[MSDP] = self.msdp_to_evennia self.protocol.will(MSDP).addCallbacks(self.do_msdp, self.no_msdp) self.msdp_reported = {} def no_msdp(self, option): "No msdp supported or wanted" - print "No msdp supported" pass def do_msdp(self, option): """ Called when client confirms that it can do MSDP. """ - print "msdp supported" self.protocol.protocol_flags['MSDP'] = True - def parse_msdp(self, args): - "Called with arguments to subnegotiation" - - def func_to_msdp(self, cmdname, data): + def evennia_to_msdp(self, cmdname, *args, **kwargs): """ handle return data from cmdname by converting it to a proper msdp structure. data can either be a single value (will be converted to a string), a list (will be converted to an MSDP_ARRAY), or a dictionary (will be converted to MSDP_TABLE). - OBS - this supports nested tables and even arrays nested - inside tables, as opposed to the receive method. Arrays - cannot hold tables by definition (the table must be named - with MSDP_VAR, and an array can only contain MSDP_VALs). + OBS - there is no actual use of arrays and tables in the MSDP + specification or default commands -- are returns are implemented + as simple lists or named lists (our name for them here, these + un-bounded structures are not named in the specification). 
So for + now, this routine will not explicitly create arrays nor tables, + although there are helper methods ready should it be needed in + the future. """ - def make_table(name, datadict, string): + def make_table(name, **kwargs): "build a table that may be nested with other tables or arrays." - string += MSDP_VAR + name + MSDP_VAL + MSDP_TABLE_OPEN - for key, val in datadict.items(): - if type(val) == type({}): - string += make_table(key, val, string) + string = MSDP_VAR + force_str(name) + MSDP_VAL + MSDP_TABLE_OPEN + for key, val in kwargs.items(): + if isinstance(val, dict): + string += make_table(key, **val) elif hasattr(val, '__iter__'): - string += make_array(key, val, string) + string += make_array(key, *val) else: - string += MSDP_VAR + key + MSDP_VAL + val + string += MSDP_VAR + force_str(key) + MSDP_VAL + force_str(val) string += MSDP_TABLE_CLOSE - return string + return string - def make_array(name, datalist, string): - "build a simple array. Arrays may not nest tables by definition." - string += MSDP_VAR + name + MSDP_ARRAY_OPEN - for val in datalist: - string += MSDP_VAL + val + def make_array(name, *args): + "build an array. Arrays may not nest tables by definition."
+ string = MSDP_VAR + force_str(name) + MSDP_ARRAY_OPEN + string += MSDP_VAL.join(force_str(arg) for arg in args) string += MSDP_ARRAY_CLOSE return string - if isinstance(data, dict): - msdp_string = make_table(cmdname, data, "") - elif hasattr(data, '__iter__'): - msdp_string = make_array(cmdname, data, "") + def make_list(name, *args): + "build a simple list - an array without start/end markers" + string = MSDP_VAR + force_str(name) + string += MSDP_VAL.join(force_str(arg) for arg in args) + return string + + def make_named_list(name, **kwargs): + "build a named list - a table without start/end markers" + string = MSDP_VAR + force_str(name) + for key, val in kwargs.items(): + string += MSDP_VAR + force_str(key) + MSDP_VAL + force_str(val) + return string + + # Default MSDP commands + + print "MSDP outgoing:", cmdname, args, kwargs + + cupper = cmdname.upper() + if cupper == "LIST": + if args: + args = list(args) + mode = args.pop(0).upper() + self.data_out(make_array(mode, *args)) + elif cupper == "REPORT": + self.data_out(make_list("REPORT", *args)) + elif cupper == "UNREPORT": + self.data_out(make_list("UNREPORT", *args)) + elif cupper == "RESET": + self.data_out(make_list("RESET", *args)) + elif cupper == "SEND": + self.data_out(make_named_list("SEND", **kwargs)) else: - msdp_string = MSDP_VAR + cmdname + MSDP_VAL + data - return msdp_string + # return list or named lists. 
+ msdp_string = "" + if args: + msdp_string += make_list(cupper, *args) + if kwargs: + msdp_string += make_named_list(cupper, **kwargs) + self.data_out(msdp_string) - - def msdp_to_func(self, data): + def msdp_to_evennia(self, data): """ Handle a client's requested negotiation, converting it into a function mapping - either one of the MSDP @@ -218,143 +163,151 @@ class Msdp(object): if hasattr(data, "__iter__"): data = "".join(data) - logger.log_infomsg("MSDP SUBNEGOTIATION: %s" % data) + #logger.log_infomsg("MSDP SUBNEGOTIATION: %s" % data) - for table in regex_table.findall(data): - tables[table[0].upper()] = dict(regex_varval.findall(table[1])) - for array in regex_array.findall(data): - arrays[array[0].upper()] = dict(regex_varval.findall(array[1])) - # get all stand-alone variables, but first we must clean out all tables and arrays (which also contain vars) - variables = dict((key.upper(), val) for key, val in regex_varval.findall(regex_array.sub("", regex_table.sub("", data)))) + for key, table in regex_table.findall(data): + tables[key] = {} + for varval in regex_var.split(table): + parts = regex_val.split(varval) + tables[key].expand({parts[0] : tuple(parts[1:]) if len(parts)>1 else ("",)}) + for key, array in regex_array.findall(data): + arrays[key] = [] + for val in regex_val.split(array): + arrays[key].append(val) + arrays[key] = tuple(arrays[key]) + for varval in regex_var.split(regex_array.sub("", regex_table.sub("", data))): + # get remaining varvals after cleaning away tables/arrays + parts = regex_val.split(varval) + variables[parts[0].upper()] = tuple(parts[1:]) if len(parts)>1 else ("", ) - print "MSDP: table, array, variables:", tables, arrays, variables + #print "MSDP: table, array, variables:", tables, arrays, variables - ret = "" + # all variables sent through msdp to Evennia are considered commands with arguments. 
+ # there are three forms of commands possible through msdp: + # + # VARNAME VAR -> varname(var) + # ARRAYNAME VAR VAL VAR VAL VAR VAL ENDARRAY -> arrayname(val,val,val) + # TABLENAME TABLE VARNAME VAL VARNAME VAL ENDTABLE -> tablename(varname=val, varname=val) + # # default MSDP functions if "LIST" in variables: - ret += self.func_to_msdp("LIST", self.msdp_cmd_list(variables["LIST"])) - del variables["LIST"] + self.data_in("list", *variables.pop("LIST")) if "REPORT" in variables: - ret += self.func_to_msdp("REPORT", self.msdp_cmd_report(*(variables["REPORT"],))) - del variables["REPORT"] + self.data_in("report", *variables.pop("REPORT")) if "REPORT" in arrays: - ret += self.func_to_msdp("REPORT", self.msdp_cmd_report(*arrays["REPORT"])) - del arrays["REPORT"] + self.data_in("report", *(arrays.pop("REPORT"))) + if "UNREPORT" in variables: + self.data_in("unreport", *(arrays.pop("UNREPORT"))) if "RESET" in variables: - ret += self.func_to_msdp("RESET", self.msdp_cmd_reset(*(variables["RESET"],))) - del variables["RESET"] + self.data_in("reset", *variables.pop("RESET")) if "RESET" in arrays: - ret += self.func_to_msdp("RESET", self.msdp_cmd_reset(*arrays["RESET"])) - del arrays["RESET"] + self.data_in("reset", *(arrays.pop("RESET"))) if "SEND" in variables: - ret += self.func_to_msdp("SEND", self.msdp_cmd_send(*(variables["SEND"],))) - del variables["SEND"] + self.data_in("send", *variables.pop("SEND")) if "SEND" in arrays: - ret += self.func_to_msdp("SEND",self.msdp_cmd_send(*arrays["SEND"])) - del arrays["SEND"] + self.data_in("send", *(arrays.pop("SEND"))) + + # if there are anything left consider it a call to a custom function - # if there are anything left we look for a custom function for varname, var in variables.items(): # a simple function + argument - ooc_func = MSDP_COMMANDS_CUSTOM.get(varname.upper()) - if ooc_func: - ret += self.func_to_msdp(varname, ooc_func(var)) + self.data_in(varname, (var,)) for arrayname, array in arrays.items(): # we assume the 
array are multiple arguments to the function - ooc_func = MSDP_COMMANDS_CUSTOM.get(arrayname.upper()) - if ooc_func: - ret += self.func_to_msdp(arrayname, ooc_func(*array)) + self.data_in(arrayname, *array) for tablename, table in tables.items(): # we assume tables are keyword arguments to the function - ooc_func = MSDP_COMMANDS_CUSTOM.get(arrayname.upper()) - if ooc_func: - ret += self.func_to_msdp(tablename, ooc_func(**table)) + self.data_in(tablename, **table) - if ret: - # send return value if it exists - self.msdp_send(ret) - ret = IAC + SB + MSDP + ret + IAC + SE - #ret = IAC + SB + MSDP + MSDP_VAR + "SEND" + MSDP_VAL + "Testsend" + IAC + SE - self.protocol._write(ret) - logger.log_infomsg("MSDP_RESULT: %s" % ret) - - def msdp_send(self, msdp_string): + def data_out(self, msdp_string): """ Return a msdp-valid subnegotiation across the protocol. """ - self.protocol._write(IAC + SB + MSDP + msdp_string + IAC + SE) + #print "msdp data_out (without IAC SE):", msdp_string + self.protocol ._write(IAC + SB + MSDP + force_str(msdp_string) + IAC + SE) - # MSDP Commands - # Some given MSDP (varname, value) pairs can also be treated as command + argument. - # Generic msdp command map. The argument will be sent to the given command. - # See http://tintin.sourceforge.net/msdp/ for definitions of each command. - # These are client->server commands. 
- def msdp_cmd_list(self, arg): + def data_in(self, funcname, *args, **kwargs): """ - The List command allows for retrieving various info about the server/client + Send oob data to Evennia """ - if arg == 'COMMANDS': - return self.func_to_msdp(arg, MSDP_COMMANDS) - elif arg == 'LISTS': - return self.func_to_msdp(arg, ("COMMANDS", "LISTS", "CONFIGURABLE_VARIABLES", - "REPORTED_VARIABLES", "SENDABLE_VARIABLES")) - elif arg == 'CONFIGURABLE_VARIABLES': - return self.func_to_msdp(arg, ("CLIENT_NAME", "CLIENT_VERSION", "PLUGIN_ID")) - elif arg == 'REPORTABLE_VARIABLES': - return self.func_to_msdp(arg, MSDP_REPORTABLE.keys()) - elif arg == 'REPORTED_VARIABLES': - # the dynamically set items to report - return self.func_to_msdp(arg, self.msdp_reported.keys()) - elif arg == 'SENDABLE_VARIABLES': - return self.func_to_msdp(arg, MSDP_SENDABLE.keys()) - else: - return self.func_to_msdp("LIST", arg) + #print "msdp data_in:", funcname, args, kwargs + self.protocol.data_in(text=None, oob=(funcname, args, kwargs)) - # default msdp commands + # # MSDP Commands + # # Some given MSDP (varname, value) pairs can also be treated as command + argument. + # # Generic msdp command map. The argument will be sent to the given command. + # # See http://tintin.sourceforge.net/msdp/ for definitions of each command. + # # These are client->server commands. 
+ # def msdp_cmd_list(self, arg): + # """ + # The List command allows for retrieving various info about the server/client + # """ + # if arg == 'COMMANDS': + # return self.evennia_to_msdp(arg, MSDP_COMMANDS) + # elif arg == 'LISTS': + # return self.evennia_to_msdp(arg, ("COMMANDS", "LISTS", "CONFIGURABLE_VARIABLES", + # "REPORTED_VARIABLES", "SENDABLE_VARIABLES")) + # elif arg == 'CONFIGURABLE_VARIABLES': + # return self.evennia_to_msdp(arg, ("CLIENT_NAME", "CLIENT_VERSION", "PLUGIN_ID")) + # elif arg == 'REPORTABLE_VARIABLES': + # return self.evennia_to_msdp(arg, MSDP_REPORTABLE.keys()) + # elif arg == 'REPORTED_VARIABLES': + # # the dynamically set items to report + # return self.evennia_to_msdp(arg, self.msdp_reported.keys()) + # elif arg == 'SENDABLE_VARIABLES': + # return self.evennia_to_msdp(arg, MSDP_SENDABLE.keys()) + # else: + # return self.evennia_to_msdp("LIST", arg) - def msdp_cmd_report(self, *arg): - """ - The report command instructs the server to start reporting a - reportable variable to the client. - """ - try: - return MSDP_REPORTABLE[arg](report=True) - except Exception: - logger.log_trace() + # # default msdp commands - def msdp_cmd_unreport(self, arg): - """ - Unreport a previously reported variable - """ - try: - MSDP_REPORTABLE[arg](report=False) - except Exception: - self.logger.log_trace() + # def msdp_cmd_report(self, *arg): + # """ + # The report command instructs the server to start reporting a + # reportable variable to the client. + # """ + # try: + # return MSDP_REPORTABLE[arg](report=True) + # except Exception: + # logger.log_trace() - def msdp_cmd_reset(self, arg): - """ - The reset command resets a variable to its initial state. 
- """ - try: - MSDP_REPORTABLE[arg](reset=True) - except Exception: - logger.log_trace() + # def msdp_cmd_unreport(self, arg): + # """ + # Unreport a previously reported variable + # """ + # try: + # MSDP_REPORTABLE[arg](report=False) + # except Exception: + # self.logger.log_trace() - def msdp_cmd_send(self, arg): - """ - Request the server to send a particular variable - to the client. + # def msdp_cmd_reset(self, arg): + # """ + # The reset command resets a variable to its initial state. + # """ + # try: + # MSDP_REPORTABLE[arg](reset=True) + # except Exception: + # logger.log_trace() - arg - this is a list of variables the client wants. - """ - ret = [] - if arg: - for var in make_iter(arg): - try: - ret.append(MSDP_REPORTABLE[var.upper()])# (send=True)) - except Exception: - ret.append("ERROR")#logger.log_trace() - return ret + # def msdp_cmd_send(self, *args): + # """ + # Request the server to send a particular variable + # to the client. + + # arg - this is a list of variables the client wants. + # """ + # ret = [] + # for var in make_iter(arg) + + + + + # for var in make_iter(arg): + # try: + # ret.append(MSDP_REPORTABLE[var.upper()])# (send=True)) + # except Exception: + # ret.append("ERROR")#logger.log_trace() + # return ret diff --git a/src/server/portal/portalsessionhandler.py b/src/server/portal/portalsessionhandler.py index f0dbb40dac..fffbf28c25 100644 --- a/src/server/portal/portalsessionhandler.py +++ b/src/server/portal/portalsessionhandler.py @@ -127,7 +127,6 @@ class PortalSessionHandler(SessionHandler): in from the protocol to the server. data is serialized before passed on. 
""" - #print "portal_data_in:", string self.portal.amp_protocol.call_remote_MsgPortal2Server(session.sessid, msg=text, data=kwargs) @@ -138,6 +137,7 @@ class PortalSessionHandler(SessionHandler): for session in self.sessions.values(): session.data_out(message) + def data_out(self, sessid, text=None, **kwargs): """ Called by server for having the portal relay messages and data diff --git a/src/server/portal/telnet.py b/src/server/portal/telnet.py index 9ed178b2da..ca886690e4 100644 --- a/src/server/portal/telnet.py +++ b/src/server/portal/telnet.py @@ -13,6 +13,7 @@ from src.server.session import Session from src.server.portal import ttype, mssp, msdp from src.server.portal.mccp import Mccp, mccp_compress, MCCP from src.utils import utils, ansi, logger +from src.utils.utils import make_iter, is_iter _RE_N = re.compile(r"\{n$") @@ -36,14 +37,13 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): # negotiate ttype (client info) #self.ttype = ttype.Ttype(self) # negotiate mssp (crawler communication) - self.mssp = mssp.Mssp(self) + #self.mssp = mssp.Mssp(self) # msdp - #self.msdp = msdp.Msdp(self) + self.msdp = msdp.Msdp(self) # add this new connection to sessionhandler so # the Server becomes aware of it. self.sessionhandler.connect(self) - def enableRemote(self, option): """ This sets up the remote-activated options we allow for this protocol. @@ -69,7 +69,6 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): else: return super(TelnetProtocol, self).disableLocal(option) - def connectionLost(self, reason): """ This is executed when the connection is lost for @@ -86,13 +85,6 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): be handled in line mode. Some clients also sends an erroneous line break after IAC, which we must watch out for. 
""" - #print "dataRcv (%s):" % data, - #try: - # for b in data: - # print ord(b), - # print "" - #except Exception, e: - # print str(e) + ":", str(data) if data and data[0] == IAC or self.iaw_mode: try: @@ -103,8 +95,16 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): else: self.iaw_mode = False return - except Exception: - logger.log_trace() + except Exception, err1: + conv = "" + try: + for b in data: + conv += " " + repr(ord(b)) + except Exception, err2: + conv = str(err2) + ":", str(data) + out = "Telnet Error (%s): %s (%s)" % (err1, data, conv) + logger.log_trace(out) + return # if we get to this point the command must end with a linebreak. # We make sure to add it, to fix some clients messing this up. data = data.rstrip("\r\n") + "\n" @@ -131,7 +131,7 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): Telnet method called when data is coming in over the telnet connection. We pass it on to the game engine directly. """ - self.sessionhandler.data_in(self, string) + self.data_in(text=string) # Session hooks @@ -145,11 +145,17 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): self.data_out(reason) self.connectionLost(reason) + def data_in(self, text=None, **kwargs): + """ + Data Telnet -> Server + """ + self.sessionhandler.data_in(self, text=text, **kwargs) + def data_out(self, text=None, **kwargs): """ + Data Evennia -> Player. generic hook method for engine to call in order to send data through the telnet connection. - Data Evennia -> Player. valid telnet kwargs: raw=True - pass string through without any ansi processing (i.e. 
include Evennia @@ -163,6 +169,15 @@ class TelnetProtocol(Telnet, StatefulTelnetProtocol, Session): except Exception, e: self.sendLine(str(e)) return + if "oob" in kwargs: + oobstruct = self.sessionhandler.oobstruct_parser(kwargs.pop("oob")) + if "MSDP" in self.protocol_flags: + for cmdname, args, kwargs in oobstruct: + #print "cmdname, args, kwargs:", cmdname, args, kwargs + msdp_string = self.msdp.evennia_to_msdp(cmdname, *args, **kwargs) + #print "msdp_string:", msdp_string + self.msdp.data_out(msdp_string) + ttype = self.protocol_flags.get('TTYPE', {}) raw = kwargs.get("raw", False) nomarkup = not (ttype or ttype.get('256 COLORS') or ttype.get('ANSI') or not ttype.get("init_done")) diff --git a/src/server/server.py b/src/server/server.py index 21429e4b53..e900b0694a 100644 --- a/src/server/server.py +++ b/src/server/server.py @@ -214,10 +214,14 @@ class Evennia(object): [(o.typeclass, o.at_init()) for o in ObjectDB.get_all_cached_instances()] [(p.typeclass, p.at_init()) for p in PlayerDB.get_all_cached_instances()] + with open(SERVER_RESTART, 'r') as f: + mode = f.read() + if mode in ('True', 'reload'): + from src.server.oobhandler import OOB_HANDLER + OOB_HANDLER.restore() + if SERVER_STARTSTOP_MODULE: # call correct server hook based on start file value - with open(SERVER_RESTART, 'r') as f: - mode = f.read() if mode in ('True', 'reload'): # True was the old reload flag, kept for compatibilty SERVER_STARTSTOP_MODULE.at_server_reload_start() @@ -280,6 +284,9 @@ class Evennia(object): yield self.sessions.all_sessions_portal_sync() ServerConfig.objects.conf("server_restart_mode", "reload") + from src.server.oobhandler import OOB_HANDLER + OOB_HANDLER.save() + if SERVER_STARTSTOP_MODULE: SERVER_STARTSTOP_MODULE.at_server_reload_stop() diff --git a/src/server/serversession.py b/src/server/serversession.py index a944740805..a497124e5c 100644 --- a/src/server/serversession.py +++ b/src/server/serversession.py @@ -20,6 +20,7 @@ from src.server.session import Session 
IDLE_COMMAND = settings.IDLE_COMMAND _GA = object.__getattribute__ _ObjectDB = None +_OOB_HANDLER = None # load optional out-of-band function module OOB_PLUGIN_MODULE = settings.OOB_PLUGIN_MODULE @@ -136,6 +137,14 @@ class ServerSession(Session): return self.logged_in and self.puppet get_character = get_puppet + def get_puppet_or_player(self): + """ + Returns session if not logged in; puppet if one exists, otherwise return the player. + """ + if self.logged_in: + return self.puppet if self.puppet else self.player + return None + def log(self, message, channel=True): """ Emits session info to the appropriate outputs and info channels. @@ -178,8 +187,14 @@ class ServerSession(Session): cmdhandler.cmdhandler(self, text, callertype="session", sessid=self.sessid) self.update_session_counters() if "oob" in kwargs: - # relay to OOB handler - pass + # handle oob instructions + global _OOB_HANDLER + if not _OOB_HANDLER: + from src.server.oobhandler import OOB_HANDLER as _OOB_HANDLER + oobstruct = self.sessionhandler.oobstruct_parser(kwargs.pop("oob", None)) + for (funcname, args, kwargs) in oobstruct: + if funcname: + _OOB_HANDLER.execute_cmd(self, funcname, *args, **kwargs) execute_cmd = data_in # alias diff --git a/src/server/sessionhandler.py b/src/server/sessionhandler.py index 0ae60c2ff3..0b5b4dddd1 100644 --- a/src/server/sessionhandler.py +++ b/src/server/sessionhandler.py @@ -27,6 +27,7 @@ _ServerSession = None _ServerConfig = None _ScriptDB = None + # AMP signals PCONN = chr(1) # portal session connect PDISCONN = chr(2) # portal session disconnect @@ -96,6 +97,55 @@ class SessionHandler(object): """ return dict((sessid, sess.get_sync_data()) for sessid, sess in self.sessions.items()) + def oobstruct_parser(self, oobstruct): + """ + Helper method for each session to use to parse oob structures + (The 'oob' kwarg of the msg() method) + allowed oob structures are + cmdname + ((cmdname,), (cmdname,)) + (cmdname,(arg, )) + (cmdname,(arg1,arg2)) + 
(cmdname,{key:val,key2:val2}) + (cmdname, (args,), {kwargs}) + ((cmdname, (arg1,arg2)), cmdname, (cmdname, (arg1,))) + outputs an ordered structure on the form + ((cmdname, (args,), {kwargs}), ...), where the two last parts of each tuple may be empty + """ + def _parse(oobstruct): + slen = len(oobstruct) + if not oobstruct: + return (None, (), {}) + elif not hasattr(oobstruct, "__iter__"): + # a singular command name, without arguments or kwargs + return (oobstruct.lower(), (), {}) + # regardless of number of args/kwargs, the first element must be the function name. + # we will not catch this error if not, but allow it to propagate. + if slen == 1: + return (oobstruct[0].lower(), (), {}) + elif slen == 2: + if isinstance(oobstruct[1], dict): + # cmdname, {kwargs} + return (oobstruct[0].lower(), (), dict(oobstruct[1])) + elif isinstance(oobstruct[1], (tuple, list)): + # cmdname, (args,) + return (oobstruct[0].lower(), tuple(oobstruct[1]), {}) + else: + # cmdname, (args,), {kwargs} + return (oobstruct[0].lower(), tuple(oobstruct[1]), dict(oobstruct[2])) + + if hasattr(oobstruct, "__iter__"): + # differentiate between (cmdname, cmdname), (cmdname, args, kwargs) and ((cmdname,args,kwargs), (cmdname,args,kwargs), ...) 
+ + if oobstruct and isinstance(oobstruct[0], basestring): + return (tuple(_parse(oobstruct)),) + else: + out = [] + for oobpart in oobstruct: + out.append(_parse(oobpart)) + return (tuple(out),) + return (_parse(oobstruct),) + #------------------------------------------------------------ # Server-SessionHandler class #------------------------------------------------------------ @@ -357,6 +407,7 @@ class ServerSessionHandler(SessionHandler): return self.sessions.get(sessid) return None + def announce_all(self, message): """ Send message to all connected sessions @@ -379,5 +430,4 @@ class ServerSessionHandler(SessionHandler): if session: session.data_in(text=text, **kwargs) - SESSIONS = ServerSessionHandler() diff --git a/src/settings_default.py b/src/settings_default.py index 58f8b7e5ba..ae3c3257d1 100644 --- a/src/settings_default.py +++ b/src/settings_default.py @@ -199,7 +199,7 @@ MSSP_META_MODULE = "" # Module holding OOB (Out of Band) hook objects. This allows for customization # and expansion of which hooks OOB protocols are allowed to call on the server # protocols for attaching tracker hooks for when various object field change -OOB_PLUGIN_MODULE = "" +OOB_PLUGIN_MODULE = "src.server.oob_defaults" # Tuple of modules implementing lock functions. All callable functions # inside these modules will be available as lock functions. 
LOCK_FUNC_MODULES = ("src.locks.lockfuncs",) diff --git a/src/typeclasses/models.py b/src/typeclasses/models.py index 77c44ba2ba..67c704567a 100644 --- a/src/typeclasses/models.py +++ b/src/typeclasses/models.py @@ -437,7 +437,7 @@ class NickHandler(AttributeHandler): with categories nick_ """ def has(self, key, category="inputline"): - categry = "nick_%s" % category + category = "nick_%s" % category return super(NickHandler, self).has(key, category=category) def get(self, key=None, category="inputline", **kwargs): @@ -462,6 +462,34 @@ class NickHandler(AttributeHandler): return super(NickHandler, self).all(category=category) return _GA(self.obj, self._m2m_fieldname).filter(db_category__startswith="nick_") +class NAttributeHandler(object): + """ + This stand-alone handler manages non-database saved properties by storing them + as properties on obj.ndb. It has the same methods as AttributeHandler, but they + are much simplified. + """ + def __init__(self, obj): + "initialized on the object" + self.ndb = _GA(obj, "ndb") + def has(self, key): + "Check if object has this attribute or not" + return _GA(self.ndb, key) # ndb returns None if not found + def get(self, key): + "Returns named key value" + return _GA(self.ndb, key) + def add(self, key, value): + "Add new key and value" + _SA(self.ndb, key, value) + def remove(self, key): + "Remove key from storage" + _DA(self.ndb, key) + def all(self): + "List all keys stored" + if callable(self.ndb.all): + return self.ndb.all() + else: + return [val for val in self.ndb.__dict__.keys() if not val.startswith('_')] + #------------------------------------------------------------ # # Tags @@ -645,6 +673,7 @@ class TypedObject(SharedMemoryModel): _SA(self, "dbobj", self) # this allows for self-reference _SA(self, "locks", LockHandler(self)) _SA(self, "permissions", PermissionHandler(self)) + _SA(self, "nattributes", NAttributeHandler(self)) class Meta: """ @@ -1148,6 +1177,9 @@ class TypedObject(SharedMemoryModel): if hperm in 
perms and hpos > ppos) return False + # + # Memory management + # def flush_from_cache(self): """ @@ -1157,6 +1189,60 @@ class TypedObject(SharedMemoryModel): """ self.__class__.flush_cached_instance(self) + # + # Attribute storage + # + + #@property db + def __db_get(self): + """ + Attribute handler wrapper. Allows for the syntax + obj.db.attrname = value + and + value = obj.db.attrname + and + del obj.db.attrname + and + all_attr = obj.db.all (unless there is no attribute named 'all', in which + case that will be returned instead). + """ + try: + return self._db_holder + except AttributeError: + class DbHolder(object): + "Holder for allowing property access of attributes" + def __init__(self, obj): + _SA(self, 'obj', obj) + _SA(self, "attrhandler", _GA(_GA(self, "obj"), "attributes")) + def __getattribute__(self, attrname): + if attrname == 'all': + # we allow to overload our default .all + attr = _GA(self, "attrhandler").get("all") + if attr: + return attr + return _GA(self, 'all') + return _GA(self, "attrhandler").get(attrname) + def __setattr__(self, attrname, value): + _GA(self, "attrhandler").add(attrname, value) + def __delattr__(self, attrname): + _GA(self, "attrhandler").remove(attrname) + def get_all(self): + return _GA(self, "attrhandler").all() + all = property(get_all) + self._db_holder = DbHolder(self) + return self._db_holder + #@db.setter + def __db_set(self, value): + "Stop accidentally replacing the db object" + string = "Cannot assign directly to db object! " + string += "Use db.attr=value instead." + raise Exception(string) + #@db.deleter + def __db_del(self): + "Stop accidental deletion." 
+ raise Exception("Cannot delete the db object!") + db = property(__db_get, __db_set, __db_del) + # # Non-persistent (ndb) storage # @@ -1202,35 +1288,12 @@ class TypedObject(SharedMemoryModel): raise Exception("Cannot delete the ndb object!") ndb = property(__ndb_get, __ndb_set, __ndb_del) - #def nattr(self, attribute_name=None, value=None, delete=False): - # """ - # This allows for assigning non-persistent data on the object using - # a method call. Will return None if trying to access a non-existing property. - # """ - # if attribute_name == None: - # # act as a list method - # if callable(self.ndb.all): - # return self.ndb.all() - # else: - # return [val for val in self.ndb.__dict__.keys() - # if not val.startswith['_']] - # elif delete == True: - # if hasattr(self.ndb, attribute_name): - # _DA(_GA(self, "ndb"), attribute_name) - # elif value == None: - # # act as a getter. - # if hasattr(self.ndb, attribute_name): - # _GA(_GA(self, "ndb"), attribute_name) - # else: - # return None - # else: - # # act as a setter - # _SA(self.ndb, attribute_name, value) + # - # Attribute handler methods - DEPRECATED! + # ***** DEPRECATED METHODS BELOW ******* # # @@ -1366,56 +1429,31 @@ class TypedObject(SharedMemoryModel): # creating a new attribute - check access on storing object! _GA(self, "attributes").add(attribute_name, value, accessing_obj=accessing_object, default_access=default_access_create) - #@property - def __db_get(self): + def nattr(self, attribute_name=None, value=None, delete=False): """ - A second convenience wrapper for the the attribute methods. It - allows for the syntax - obj.db.attrname = value - and - value = obj.db.attrname - and - del obj.db.attrname - and - all_attr = obj.db.all (unless there is no attribute named 'all', in which - case that will be returned instead). + This allows for assigning non-persistent data on the object using + a method call. Will return None if trying to access a non-existing property. 
""" - try: - return self._db_holder - except AttributeError: - class DbHolder(object): - "Holder for allowing property access of attributes" - def __init__(self, obj): - _SA(self, 'obj', obj) - _SA(self, "attrhandler", _GA(_GA(self, "obj"), "attributes")) - def __getattribute__(self, attrname): - if attrname == 'all': - # we allow to overload our default .all - attr = _GA(self, "attrhandler").get("all") - if attr: - return attr - return _GA(self, 'all') - return _GA(self, "attrhandler").get(attrname) - def __setattr__(self, attrname, value): - _GA(self, "attrhandler").add(attrname, value) - def __delattr__(self, attrname): - _GA(self, "attrhandler").remove(attrname) - def get_all(self): - return _GA(self, "attrhandler").all() - all = property(get_all) - self._db_holder = DbHolder(self) - return self._db_holder - #@db.setter - def __db_set(self, value): - "Stop accidentally replacing the db object" - string = "Cannot assign directly to db object! " - string += "Use db.attr=value instead." - raise Exception(string) - #@db.deleter - def __db_del(self): - "Stop accidental deletion." - raise Exception("Cannot delete the db object!") - db = property(__db_get, __db_set, __db_del) + logger.log_depmsg("obj.nattr() is deprecated. Use obj.nattributes instead.") + if attribute_name == None: + # act as a list method + if callable(self.ndb.all): + return self.ndb.all() + else: + return [val for val in self.ndb.__dict__.keys() + if not val.startswith['_']] + elif delete == True: + if hasattr(self.ndb, attribute_name): + _DA(_GA(self, "ndb"), attribute_name) + elif value == None: + # act as a getter. 
+ if hasattr(self.ndb, attribute_name): + _GA(_GA(self, "ndb"), attribute_name) + else: + return None + else: + # act as a setter + _SA(self.ndb, attribute_name, value) diff --git a/src/utils/dbserialize.py b/src/utils/dbserialize.py index 73308a2eeb..61e402010d 100644 --- a/src/utils/dbserialize.py +++ b/src/utils/dbserialize.py @@ -196,7 +196,7 @@ def pack_dbobj(item): # build the internal representation as a tuple ("__packed_dbobj__", key, creation_time, id) return natural_key and ('__packed_dbobj__', natural_key, _TO_DATESTRING(obj), _GA(obj, "id")) or item -def _unpack_dbobj(item): +def unpack_dbobj(item): """ Check and convert internal representations back to Django database models. The fact that item is a packed dbobj should be checked before this call. @@ -209,7 +209,9 @@ def _unpack_dbobj(item): except ObjectDoesNotExist: return None # even if we got back a match, check the sanity of the date (some databases may 're-use' the id) - return _TO_DATESTRING(obj.dbobj) == item[2] and obj or None + try: dbobj = obj.dbobj + except AttributeError: dbobj = obj + return _TO_DATESTRING(dbobj) == item[2] and obj or None # # Access methods @@ -267,7 +269,7 @@ def from_pickle(data, db_obj=None): return item elif _IS_PACKED_DBOBJ(item): # this must be checked before tuple - return _unpack_dbobj(item) + return unpack_dbobj(item) elif dtype == tuple: return tuple(process_item(val) for val in item) elif dtype == dict: @@ -289,7 +291,7 @@ def from_pickle(data, db_obj=None): return item elif _IS_PACKED_DBOBJ(item): # this must be checked before tuple - return _unpack_dbobj(item) + return unpack_dbobj(item) elif dtype == tuple: return tuple(process_tree(val) for val in item) elif dtype == list: diff --git a/src/utils/utils.py b/src/utils/utils.py index 368eb07764..3ef27553de 100644 --- a/src/utils/utils.py +++ b/src/utils/utils.py @@ -729,10 +729,10 @@ def mod_import(module): def all_from_module(module): """ - Return all global-level variables from a module + Return all 
global-level variables from a module as a dict """ mod = mod_import(module) - return [val for key, val in mod.__dict__.items() if not (key.startswith("_") or ismodule(val))] + return dict((key, val) for key, val in mod.__dict__.items() if not (key.startswith("_") or ismodule(val))) def variable_from_module(module, variable=None, default=None): """