diff --git a/src/objects/models.py b/src/objects/models.py index 6ae3286d93..8ed9f0279a 100644 --- a/src/objects/models.py +++ b/src/objects/models.py @@ -211,9 +211,10 @@ class ObjectDB(TypedObject): # del_field_cache(self, "sessid") #sessid = property(__sessid_get, __sessid_set, __sessid_del) - def _db_location_handler(self, loc, old_value=None): - "This handles changes to the db_location field." + def _at_db_location_save(self, new_value, old_value=None): + "This is called automatically just before a new location is saved." #print "db_location_handler:", loc, old_value + loc = new_value try: old_loc = old_value # new_value can be dbref, typeclass or dbmodel diff --git a/src/server/caches.py b/src/server/caches.py index 72fceb8f1c..7b8da2dfda 100644 --- a/src/server/caches.py +++ b/src/server/caches.py @@ -25,6 +25,11 @@ _FIELD_CACHE = {} _ATTR_CACHE = {} _PROP_CACHE = defaultdict(dict) +# OOB trackers +_TRACKED_FIELDS = {} +_TRACKED_ATTRS = {} +_TRACKED_CACHE = {} + #------------------------------------------------------------ # Cache key hash generation @@ -80,13 +85,13 @@ def hashid(obj, suffix=""): # they are saved, no matter from where. #------------------------------------------------------------ -# callback to pre_save signal (connected in src.server.server) +# callback to field pre_save signal (connected in src.server.server) def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwargs): """ - Called at the beginning of the save operation. The save method + Called at the beginning of the field save operation. The save method must be called with the update_fields keyword in order to be most efficient. This method should NOT save; rather it is the save() that triggers this function. - Its main purpose is to allow to plug-in a save handler. + Its main purpose is to allow to plug-in a save handler and oob handlers. 
""" if raw: return @@ -101,8 +106,9 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg fieldname = field.name new_value = field.value_from_object(instance) # try to see if there is a handler on object that should be triggered when saving. - handlername = "_%s_handler" % fieldname + handlername = "_at_%s_save" % fieldname try: + # try-except is about 14 times faster than hasattr in this case handler = _GA(instance, handlername) except AttributeError: handler = None @@ -117,6 +123,8 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg new_value = handler(new_value, old_value=old_value) # we re-assign this to the field, save() will pick it up from there _SA(instance, fieldname, new_value) + if instance and hasattr(instance, "oobhandler"): + _GA(instance, "oobhandler").update("fieldset", fieldname, old_value, new_value) #if hid: # # update cache # _FIELD_CACHE[hid] = new_value @@ -220,6 +228,8 @@ def get_prop_cache(obj, propname): def set_prop_cache(obj, propname, propvalue): "Set property cache" hid = hashid(obj, "-%s" % propname) + if obj and hasattr(obj, "oobhandler"): + obj.oobhandler.update(propname, _GA(obj, propname), propvalue, type="property", action="set") if hid: #print "set_prop_cache", propname, propvalue _PROP_CACHE[hid][propname] = propvalue @@ -228,6 +238,8 @@ def set_prop_cache(obj, propname, propvalue): def del_prop_cache(obj, propname): "Delete element from property cache" hid = hashid(obj, "-%s" % propname) + if obj and hasattr(obj, "oobhandler"): + obj.oobhandler.update(propname, _GA(obj, propname), None, type="property", action="delete") if hid and propname in _PROP_CACHE[hid]: del _PROP_CACHE[hid][propname] #_PROP_CACHE.delete(hid) @@ -319,212 +331,3 @@ def flush_prop_cache(): # if oob_hook: # oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2]) # -# - -# # old cache system -# -# if _ENABLE_LOCAL_CACHES: -# # Cache stores -# _ATTR_CACHE = defaultdict(dict) -# 
_FIELD_CACHE = defaultdict(dict) -# _PROP_CACHE = defaultdict(dict) -# -# -# def get_cache_sizes(): -# """ -# Get cache sizes, expressed in number of objects and memory size in MB -# """ -# global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE -# -# attr_n = sum(len(dic) for dic in _ATTR_CACHE.values()) -# attr_mb = sum(sum(getsizeof(obj) for obj in dic.values()) for dic in _ATTR_CACHE.values()) / 1024.0 -# -# field_n = sum(len(dic) for dic in _FIELD_CACHE.values()) -# field_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0 -# -# prop_n = sum(len(dic) for dic in _PROP_CACHE.values()) -# prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0 -# -# return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb) -# -# # on-object database field cache -# def get_field_cache(obj, name): -# "On-model Cache handler." -# global _FIELD_CACHE -# hid = hashid(obj) -# if hid: -# try: -# return _FIELD_CACHE[hid][name] -# except KeyError: -# val = _GA(obj, "db_%s" % name) -# _FIELD_CACHE[hid][name] = val -# return val -# return _GA(obj, "db_%s" % name) -# -# def set_field_cache(obj, name, val): -# "On-model Cache setter. Also updates database." 
-# _SA(obj, "db_%s" % name, val) -# _GA(obj, "save")() -# hid = hashid(obj) -# if hid: -# global _FIELD_CACHE -# _FIELD_CACHE[hid][name] = val -# # oob hook functionality -# if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): -# _OOB_HANDLER.update(hid, name, val) -# -# def del_field_cache(obj, name): -# "On-model cache deleter" -# hid = hashid(obj) -# _SA(obj, "db_%s" % name, None) -# _GA(obj, "save")() -# if hid: -# try: -# del _FIELD_CACHE[hid][name] -# except KeyError: -# pass -# if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): -# _OOB_HANDLER.update(hid, name, None) -# -# def flush_field_cache(obj=None): -# "On-model cache resetter" -# hid = hashid(obj) -# global _FIELD_CACHE -# if hid: -# try: -# del _FIELD_CACHE[hashid(obj)] -# except KeyError, e: -# pass -# else: -# # clean cache completely -# _FIELD_CACHE = defaultdict(dict) -# -# # on-object property cache (unrelated to database) -# # Note that the get/set_prop_cache handler do not actually -# # get/set the property "on" the object but only reads the -# # value to/from the cache. This is intended to be used -# # with a get/setter property on the object. -# -# def get_prop_cache(obj, name, default=None): -# "On-model Cache handler." -# global _PROP_CACHE -# hid = hashid(obj) -# if hid: -# try: -# val = _PROP_CACHE[hid][name] -# except KeyError: -# return default -# _PROP_CACHE[hid][name] = val -# return val -# return default -# -# def set_prop_cache(obj, name, val): -# "On-model Cache setter. Also updates database." 
-# hid = hashid(obj) -# if hid: -# global _PROP_CACHE -# _PROP_CACHE[hid][name] = val -# # oob hook functionality -# oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name) -# if oob_hook: -# oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2]) -# -# -# def del_prop_cache(obj, name): -# "On-model cache deleter" -# try: -# del _PROP_CACHE[hashid(obj)][name] -# except KeyError: -# pass -# def flush_prop_cache(obj=None): -# "On-model cache resetter" -# hid = hashid(obj) -# global _PROP_CACHE -# if hid: -# try: -# del _PROP_CACHE[hid] -# except KeyError,e: -# pass -# else: -# # clean cache completely -# _PROP_CACHE = defaultdict(dict) -# -# # attribute cache -# -# def get_attr_cache(obj, attrname): -# """ -# Attribute cache store -# """ -# return _ATTR_CACHE[hashid(obj)].get(attrname, None) -# -# def set_attr_cache(obj, attrname, attrobj): -# """ -# Cache an attribute object -# """ -# hid = hashid(obj) -# if hid: -# global _ATTR_CACHE -# _ATTR_CACHE[hid][attrname] = attrobj -# # oob hook functionality -# oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname) -# if oob_hook: -# oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2]) -# -# def del_attr_cache(obj, attrname): -# """ -# Remove attribute from cache -# """ -# global _ATTR_CACHE -# try: -# _ATTR_CACHE[hashid(obj)][attrname].no_cache = True -# del _ATTR_CACHE[hashid(obj)][attrname] -# except KeyError: -# pass -# -# def flush_attr_cache(obj=None): -# """ -# Flush the attribute cache for this object. 
-# """ -# global _ATTR_CACHE -# if obj: -# for attrobj in _ATTR_CACHE[hashid(obj)].values(): -# attrobj.no_cache = True -# del _ATTR_CACHE[hashid(obj)] -# else: -# # clean cache completely -# for objcache in _ATTR_CACHE.values(): -# for attrobj in objcache.values(): -# attrobj.no_cache = True -# _ATTR_CACHE = defaultdict(dict) -# -# -# def flush_obj_caches(obj=None): -# "Clean all caches on this object" -# flush_field_cache(obj) -# flush_prop_cache(obj) -# flush_attr_cache(obj) -# - -#else: - # local caches disabled. Use simple pass-through replacements - -#def flush_field_cache(obj=None): -# pass -# these should get oob handlers when oob is implemented. -#def get_prop_cache(obj, name, default=None): -# return None -#def set_prop_cache(obj, name, val): -# pass -#def del_prop_cache(obj, name): -# pass -#def flush_prop_cache(obj=None): -# pass -#def get_attr_cache(obj, attrname): -# return None -#def set_attr_cache(obj, attrname, attrobj): -# pass -#def del_attr_cache(obj, attrname): -# pass -#def flush_attr_cache(obj=None): -# pass - diff --git a/src/server/oobhandler.py b/src/server/oobhandler.py index 5591c498b1..ca91e8632f 100644 --- a/src/server/oobhandler.py +++ b/src/server/oobhandler.py @@ -41,84 +41,150 @@ def track_desc(session, *args, **kwargs): # to start off we return the value once return char.db.desc + +What is passed around is a dictionary (pickled to a string) on the form + {oobfunction: ((arg1,arg2,...),{kwarg1:val,kwarg2:val}), oobfunction2: ... 
} + + """ +from django.conf import settings from collections import defaultdict from src.scripts.objects import ScriptDB from src.scripts.script import Script from src.server import caches from src.server.caches import hashid from src.utils import logger, create +from src.utils.utils import variable_from_module + +# get the custom function map of available oob functions +_OOB_FUNCMAP = variable_from_module(settings.OOB_FUNC_MODULE, "OOB_FUNC_MAP", default={}) + + +class _TrackerPool(object): + """ + This maintains a pool of __OOBTracker scripts, ordered by interval + """ + def __init__(self): + self.trackerscripts = {} + def add(self, obj, interval, oobkey): + """ + Add a new tracking + """ + if interval not in self.trackerscripts: + # if no existing interval exists, create new script to fill the gap + new_tracker = create.script(_OOBTracker, interval=interval) + self.trackerscripts[interval] = new_tracker + self.trackerscripts[interval].subscribe(obj, oobkey) + +# module-level singleton pool; this is the name OOBHandler.track references +_OOBTrackPool = _TrackerPool() + class _OOBTracker(Script): """ - Active tracker script, handles subscriptions + Active tracker script """ def at_script_creation(self): - "Called at script creation" - self.key = "oob_tracking_30" # default to 30 second interval - self.desc = "Active tracking of oob data" - self.interval = 30 - self.persistent = False - self.start_delay = True - # holds dictionary of key:(function, (args,), {kwargs}) to call - self.db.subs = {} - - def track(self, key, func, *args, **kwargs): - """ - Add sub to track. func(*args, **kwargs) will be called at self.interval. - key is a unique identifier for removing the tracking later. - """ - self.subs[key] = (func, args, kwargs) - - def untrack(self, key): - """ - Clear a tracking. Return True if untracked successfully, None if - no previous track was found. - """ - if key in self.subs: - del self.subs[key] - if not self.subs: - # we have no more subs. Stop this script. 
- self.stop() - return True + "Called when script is initialized" + self.key = "oob_func" + self.desc = "OOB functionality script" + self.persistent = False #oob scripts should always be non-persistent + self.db.subscriptions = {} def at_repeat(self): """ - Loops through all subs, calling their given function + Calls subscriptions every self.interval seconds """ - for func, args, kwargs in self.subs: + for obj, oobkey, kwargs in self.db.subscriptions.values(): try: - func(*args, **kwargs) + # oobkey is "callback_key-hashkey" as built by OOBHandler._make_hash + callback_key, _, hashkey = oobkey.partition("-") + obj.oobhandler.execute_func(callback_key, hashkey, **kwargs) except Exception: logger.log_trace() -class _OOBStore(Script): + def subscribe(self, subscriber, oobkey, **kwargs): + """ + Sign up a subscriber to this oobfunction. Subscriber is + a database object with a dbref. + """ + self.db.subscriptions[subscriber.dbid] = (subscriber.dbobj, oobkey, kwargs) + + def unsubscribe(self, subscriber): + """ + Unsubscribe from oobfunction. Returns True if removal was + successful, False otherwise + """ + removed = self.db.subscriptions.pop(subscriber.dbid, False) + return True if removed else False + + +class OOBHandler(object): + """ - Store OOB data between restarts + Out-of-band handler. Should be initialized on each model that should be possible to track. 
+ Tracking will apply """ - def at_script_creation(self): - "Called at script creation" - self.key = "oob_save_store" - self.desc = "Stores OOB data" - self.persistent = True - def save_oob_data(self, data): - self.db.store = data - def get_oob_data(self): - return self.db.store + def __init__(self, obj): + "initialize the handler with the object it is stored on" + self.obj = obj + self.tracked = defaultdict(dict) + self.oobstrings = "" + + + def _make_hash(self, callback_key, hashkey): + """ + create an id-hash for storage + """ + return "%s-%s" % (callback_key, hashkey) + + def track(self, callback_key, hashkey, interval=None, **kwargs): + """ + Access method - start tracking given changes on this object + + oobkey - available function key mapped in OOB_FUNC_MODULE.OOB_FUNC_MAP + interval - if None, updating will happen on-demand, only when appropriate callbacks are triggered. + if int > 0, the tracker will actively call oobfunc at this interval. Usually, on-demand + updating is preferred for efficiency reasons. + other kwargs will be passed to oob function given by oobkey at run-time along with other on-the-fly kwargs. 
+ """ + hid = self._make_hash(callback_key, hashkey) + if interval: + _OOBTrackPool.add(self, interval, hid) + self.tracked[hid] = kwargs + + def update_tracked(self, callback_key, hashkey, **kwargs): + """ + Called by tracked systems when they update + """ + hid = self._make_hash(callback_key, hashkey) + if hid in self.tracked: + tkwargs = self.tracked[hid] + kwargs.update(tkwargs) + self.execute_func(callback_key, hashkey, **kwargs) + + def execute_func(self, callback_key, hashkey, **kwargs): + """ + This is called from the outside to crank the oob mechanism manually + """ + try: + _OOB_FUNCMAP[callback_key](self.obj, hashkey, **kwargs) + except Exception: + logger.log_trace() class OOBhandler(object): """ Main Out-of-band handler """ - def __init__(self): + def __init__(self, obj): "initialization" + self.obj = obj self.track_passive_subs = defaultdict(dict) scripts = ScriptDB.objects.filter(db_key__startswith="oob_tracking_") self.track_active_subs = dict((s.interval, s) for s in scripts) # set reference on caches module caches._OOB_HANDLER = self + def _init_func(self): + """ + Initialize the handler. + """ + def track_passive(self, oobkey, tracker, tracked, entityname, callback=None, mode="db", *args, **kwargs): """ Passively track changes to an object property, @@ -185,7 +251,7 @@ class OOBhandler(object): if mode in ("db", "ndb", "custom"): caches.unregister_oob_update_hook(tracked, entityname, mode=mode) elif mode == "property": - if hasattr(, 'db_%s' % entityname.lstrip("db_")): + if hasattr(self.obj, 'db_%s' % entityname.lstrip("db_")): caches.unregister_oob_update_hook(tracked, entityname, mode="field") else: caches.unregister_oob_update_hook(tracked, entityname, mode="property") diff --git a/src/utils/utils.py b/src/utils/utils.py index c072c36002..368eb07764 100644 --- a/src/utils/utils.py +++ b/src/utils/utils.py @@ -727,6 +727,13 @@ def mod_import(module): result[0].close() return mod +def all_from_module(module): + """ + Return all global-level variables from a module + 
""" + mod = mod_import(module) + return [val for key, val in mod.__dict__.items() if not (key.startswith("_") or ismodule(val))] + def variable_from_module(module, variable=None, default=None): """ Retrieve a variable or list of variables from a module. The variable(s) must be defined