Changed cache system to use Django's cache mechanism. Changed field caches to make use of Django signalling instead of custom caching calls (this should make the system consistent also when called from the webserver). Created a wrapper system for easily wrapping fields with a default wrapper, so as to not have to explicitly define the properties (such as objdb.key) which all just do the same thing: load from the field and make sure to call save().

This commit is contained in:
Griatch 2013-05-29 16:16:28 +02:00
parent deafb9c544
commit 8202dba596
5 changed files with 490 additions and 334 deletions

View file

@ -2,14 +2,8 @@
Central caching module.
"""
from sys import getsizeof
from collections import defaultdict
from django.conf import settings
from src.server.models import ServerConfig
from src.utils.utils import to_str, uses_database
_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE
from src.utils.utils import uses_database, to_str
_GA = object.__getattribute__
_SA = object.__setattr__
@ -21,16 +15,8 @@ if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6
else:
_DATESTRING = "%Y:%m:%d-%H:%M:%S:%f"
# OOB hooks (OOB not yet functional, don't use yet)
_OOB_FIELD_UPDATE_HOOKS = defaultdict(dict)
_OOB_PROP_UPDATE_HOOKS = defaultdict(dict)
_OOB_ATTR_UPDATE_HOOKS = defaultdict(dict)
_OOB_NDB_UPDATE_HOOKS = defaultdict(dict)
_OOB_CUSTOM_UPDATE_HOOKS = defaultdict(dict)
_OOB_HANDLER = None # set by oob handler when it initializes
def hashid(obj):
def hashid(obj, suffix=""):
"""
Returns a per-class unique that combines the object's
class name with its idnum and creation time. This makes this id unique also
@ -57,301 +43,425 @@ def hashid(obj):
# this will happen if setting properties on an object which is not yet saved
return None
# build the hashid
hid = "%s-%s-#%s" % (_GA(obj, "__class__"), date, idnum)
hid = "%s-%s-#%s%s" % (_GA(obj, "__class__"), date, idnum, suffix)
hid = hid.replace(" ", "")
_SA(obj, "_hashid", hid)
return hid
return to_str(hid)
# oob helper functions
def register_oob_update_hook(obj,name, entity="field"):
# signal handlers
from django.core.cache import get_cache
#from django.db.models.signals import pre_save, pre_delete, post_init
# field cache
_FIELD_CACHE = get_cache("field_cache")
if not _FIELD_CACHE:
raise RuntimeError("settings.CACHE does not contain a 'field_cache' entry!")
# callback before saving an object
def field_pre_save(sender, **kwargs):
"""
Register hook function to be called when field/property/db/ndb is updated.
Given function will be called with function(obj, entityname, newvalue, *args, **kwargs)
entity - one of "field", "property", "db", "ndb" or "custom"
Called at the beginning of the save operation. The save method
must be called with the update_fields keyword in order to limit
processing to only the fields that actually changed; otherwise
all fields on the model are processed.
"""
hid = hashid(obj)
if hid:
if entity == "field":
global _OOB_FIELD_UPDATE_HOOKS
_OOB_FIELD_UPDATE_HOOKS[hid][name] = True
return
elif entity == "property":
global _OOB_PROP_UPDATE_HOOKS
_OOB_PROP_UPDATE_HOOKS[hid][name] = True
elif entity == "db":
global _OOB_ATTR_UPDATE_HOOKS
_OOB_ATTR_UPDATE_HOOKS[hid][name] = True
elif entity == "ndb":
global _OOB_NDB_UPDATE_HOOKS
_OOB_NDB_UPDATE_HOOKS[hid][name] = True
elif entity == "custom":
global _OOB_CUSTOM_UPDATE_HOOKS
_OOB_CUSTOM_UPDATE_HOOKS[hid][name] = True
else:
return None
global _FIELD_CACHE
def unregister_oob_update_hook(obj, name, entity="property"):
"""
Un-register a report hook
"""
hid = hashid(obj)
if hid:
global _OOB_FIELD_UPDATE_HOOKS,_OOB_PROP_UPDATE_HOOKS, _OOB_ATTR_UPDATE_HOOKS
global _OOB_CUSTOM_UPDATE_HOOKS, _OOB_NDB_UPDATE_HOOKS
if entity == "field" and name in _OOB_FIELD_UPDATE_HOOKS:
del _OOB_FIELD_UPDATE_HOOKS[hid][name]
elif entity == "property" and name in _OOB_PROP_UPDATE_HOOKS:
del _OOB_PROP_UPDATE_HOOKS[hid][name]
elif entity == "db" and name in _OOB_ATTR_UPDATE_HOOKS:
del _OOB_ATTR_UPDATE_HOOKS[hid][name]
elif entity == "ndb" and name in _OOB_NDB_UPDATE_HOOKS:
del _OOB_NDB_UPDATE_HOOKS[hid][name]
elif entity == "custom" and name in _OOB_CUSTOM_UPDATE_HOOKS:
del _OOB_CUSTOM_UPDATE_HOOKS[hid][name]
else:
return None
def call_ndb_hooks(obj, attrname, value):
"""
No caching is done of ndb here, but
we use this as a way to call OOB hooks.
"""
hid = hashid(obj)
if hid:
oob_hook = _OOB_NDB_UPDATE_HOOKS[hid].get(attrname)
if oob_hook:
oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2])
def call_custom_hooks(obj, attrname, value):
"""
Custom handler for developers adding their own oob hooks, e.g. to
custom typeclass properties.
"""
hid = hashid(obj)
if hid:
oob_hook = _OOB_CUSTOM_UPDATE_HOOKS[hid].get(attrname)
if oob_hook:
oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2])
if _ENABLE_LOCAL_CACHES:
# Cache stores
_ATTR_CACHE = defaultdict(dict)
_FIELD_CACHE = defaultdict(dict)
_PROP_CACHE = defaultdict(dict)
def get_cache_sizes():
"""
Get cache sizes, expressed in number of objects and memory size in MB
"""
global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE
attr_n = sum(len(dic) for dic in _ATTR_CACHE.values())
attr_mb = sum(sum(getsizeof(obj) for obj in dic.values()) for dic in _ATTR_CACHE.values()) / 1024.0
field_n = sum(len(dic) for dic in _FIELD_CACHE.values())
field_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0
prop_n = sum(len(dic) for dic in _PROP_CACHE.values())
prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0
return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb)
# on-object database field cache
def get_field_cache(obj, name):
"On-model Cache handler."
global _FIELD_CACHE
hid = hashid(obj)
if hid:
try:
return _FIELD_CACHE[hid][name]
except KeyError:
val = _GA(obj, "db_%s" % name)
_FIELD_CACHE[hid][name] = val
return val
return _GA(obj, "db_%s" % name)
def set_field_cache(obj, name, val):
"On-model Cache setter. Also updates database."
_SA(obj, "db_%s" % name, val)
_GA(obj, "save")()
hid = hashid(obj)
if hid:
global _FIELD_CACHE
_FIELD_CACHE[hid][name] = val
# oob hook functionality
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
_OOB_HANDLER.update(hid, name, val)
def del_field_cache(obj, name):
"On-model cache deleter"
hid = hashid(obj)
_SA(obj, "db_%s" % name, None)
_GA(obj, "save")()
if hid:
try:
del _FIELD_CACHE[hid][name]
except KeyError:
pass
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
_OOB_HANDLER.update(hid, name, None)
def flush_field_cache(obj=None):
"On-model cache resetter"
hid = hashid(obj)
global _FIELD_CACHE
if hid:
try:
del _FIELD_CACHE[hashid(obj)]
except KeyError, e:
pass
else:
# clean cache completely
_FIELD_CACHE = defaultdict(dict)
# on-object property cache (unrelated to database)
# Note that the get/set_prop_cache handler do not actually
# get/set the property "on" the object but only reads the
# value to/from the cache. This is intended to be used
# with a get/setter property on the object.
def get_prop_cache(obj, name, default=None):
"On-model Cache handler."
global _PROP_CACHE
hid = hashid(obj)
if hid:
try:
val = _PROP_CACHE[hid][name]
except KeyError:
return default
_PROP_CACHE[hid][name] = val
return val
return default
def set_prop_cache(obj, name, val):
"On-model Cache setter. Also updates database."
hid = hashid(obj)
if hid:
global _PROP_CACHE
_PROP_CACHE[hid][name] = val
# oob hook functionality
oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name)
if oob_hook:
oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2])
def del_prop_cache(obj, name):
"On-model cache deleter"
if kwargs.pop("raw", False): return
instance = kwargs.pop("instance")
fields = kwargs.pop("update_fields", None)
if fields:
# this is a list of strings at this point. We want field objects
fields = (instance._meta.get_field_by_name(field)[0] for field in fields)
else:
# meta.fields are already field objects
fields = instance._meta.fields
for field in fields:
fieldname = field.name
new_value = field.value_from_object(instance)
handlername = "_%s_handler" % fieldname
try:
del _PROP_CACHE[hashid(obj)][name]
except KeyError:
pass
def flush_prop_cache(obj=None):
"On-model cache resetter"
hid = hashid(obj)
global _PROP_CACHE
handler = _GA(instance, handlername)
except AttributeError:
handler = None
hid = hashid(instance, "-%s" % fieldname)
if callable(handler):
old_value = _FIELD_CACHE.get(hid) if hid else None
# the handler may modify the stored value in various ways
# don't catch exceptions, the handler must work!
new_value = handler(instance, new_value, oldval=old_value)
# we re-assign this to the field, save() will pick it up from there
_SA(instance, fieldname, new_value)
if hid:
try:
del _PROP_CACHE[hid]
except KeyError,e:
pass
else:
# clean cache completely
_PROP_CACHE = defaultdict(dict)
# update cache
_FIELD_CACHE.set(hid, new_value)
# attribute cache
# goes into server:
#pre_save.connect(field_pre_save, dispatch_uid="fieldcache")
def get_attr_cache(obj, attrname):
"""
Attribute cache store
"""
return _ATTR_CACHE[hashid(obj)].get(attrname, None)
def set_attr_cache(obj, attrname, attrobj):
"""
Cache an attribute object
"""
hid = hashid(obj)
if hid:
global _ATTR_CACHE
_ATTR_CACHE[hid][attrname] = attrobj
# oob hook functionality
oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname)
if oob_hook:
oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2])
def del_attr_cache(obj, attrname):
"""
Remove attribute from cache
"""
global _ATTR_CACHE
try:
_ATTR_CACHE[hashid(obj)][attrname].no_cache = True
del _ATTR_CACHE[hashid(obj)][attrname]
except KeyError:
pass
def flush_attr_cache(obj=None):
"""
Flush the attribute cache for this object.
"""
global _ATTR_CACHE
if obj:
for attrobj in _ATTR_CACHE[hashid(obj)].values():
attrobj.no_cache = True
del _ATTR_CACHE[hashid(obj)]
else:
# clean cache completely
for objcache in _ATTR_CACHE.values():
for attrobj in objcache.values():
attrobj.no_cache = True
_ATTR_CACHE = defaultdict(dict)
## attr cache - caching the attribute objects related to a given object to
## avoid lookups more than necessary (this makes attributes en par in speed
## to any property). The signal is triggered by the Attribute itself when it
## is created or deleted (it holds a reference to the object)
#
#_ATTR_CACHE = get_cache("attr_cache")
#if not _ATTR_CACHE:
# raise RuntimeError("settings.CACHE does not contain an 'attr_cache' entry!")
#
#def attr_post_init(sender, **kwargs):
# "Called when attribute is created or retrieved in connection with obj."
# hid = hashid(sender.db_obj, "-%s" % sender.db_key)
# _ATTR_CACHE.set(hid, sender)
#def attr_pre_delete(sender, **kwargs):
# "Called when attribute is deleted (del_attribute)"
# hid = hashid(sender.db_obj, "-%s" % sender.db_key)
# _ATTR_CACHE.delete(hid)
#
### goes into server:
#from src.objects.models import ObjAttribute
#from src.scripts.models import ScriptAttribute
#from src.players.models import PlayerAttribute
#post_init.connect(attr_post_init, sender=ObjAttribute, dispatch_uid="objattrcache")
#post_init.connect(attr_post_init, sender=ScriptAttribute, dispatch_uid="scriptattrcache")
#post_init.connect(attr_post_init, sender=PlayerAttribute, dispatch_uid="playerattrcache")
#pre_delete.connect(attr_pre_delete, sender=ObjAttribute, dispatch_uid="objattrcache")
#pre_delete.connect(attr_pre_delete, sender=ScriptAttribute, dispatch_uid="scriptattrcache")
#pre_delete.connect(attr_pre_delete, sender=PlayerAttribute, dispatch_uid="playerattrcache")
#
#
## property cache - this doubles as a central cache and as a way
## to trigger oob on such changes.
#
#from django.dispatch import Signal
#_PROP_CACHE = get_cache("prop_cache")
#if not _PROP_CACHE:
# raise RuntimeError("settings.CACHE does not contain a 'prop_cache' entry!")
#
#PROP_POST_UPDATE = Signal(providing_args=["propname", "propvalue"])
#
#def prop_update(sender, **kwargs):
# "Called when a propery is updated. kwargs are propname and propvalue."
# propname, propvalue = kwargs.pop("propname", None), kwargs.pop("propvalue", None)
# if propname == None: return
# hid = hashid(sender, "-%s" % propname)
# _PROP_CACHE.set(hid, propvalue)
#
#PROP_POST_UPDATE.connect(prop_update, dispatch_uid="propcache")
#
#
def flush_obj_caches(obj=None):
"Clean all caches on this object"
flush_field_cache(obj)
flush_prop_cache(obj)
flush_attr_cache(obj)
else:
#_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE
## oob helper functions
# OOB hooks (OOB not yet functional, don't use yet)
#_OOB_FIELD_UPDATE_HOOKS = defaultdict(dict)
#_OOB_PROP_UPDATE_HOOKS = defaultdict(dict)
#_OOB_ATTR_UPDATE_HOOKS = defaultdict(dict)
#_OOB_NDB_UPDATE_HOOKS = defaultdict(dict)
#_OOB_CUSTOM_UPDATE_HOOKS = defaultdict(dict)
#
#_OOB_HANDLER = None # set by oob handler when it initializes
#def register_oob_update_hook(obj,name, entity="field"):
# """
# Register hook function to be called when field/property/db/ndb is updated.
# Given function will be called with function(obj, entityname, newvalue, *args, **kwargs)
# entity - one of "field", "property", "db", "ndb" or "custom"
# """
# hid = hashid(obj)
# if hid:
# if entity == "field":
# global _OOB_FIELD_UPDATE_HOOKS
# _OOB_FIELD_UPDATE_HOOKS[hid][name] = True
# return
# elif entity == "property":
# global _OOB_PROP_UPDATE_HOOKS
# _OOB_PROP_UPDATE_HOOKS[hid][name] = True
# elif entity == "db":
# global _OOB_ATTR_UPDATE_HOOKS
# _OOB_ATTR_UPDATE_HOOKS[hid][name] = True
# elif entity == "ndb":
# global _OOB_NDB_UPDATE_HOOKS
# _OOB_NDB_UPDATE_HOOKS[hid][name] = True
# elif entity == "custom":
# global _OOB_CUSTOM_UPDATE_HOOKS
# _OOB_CUSTOM_UPDATE_HOOKS[hid][name] = True
# else:
# return None
#
#def unregister_oob_update_hook(obj, name, entity="property"):
# """
# Un-register a report hook
# """
# hid = hashid(obj)
# if hid:
# global _OOB_FIELD_UPDATE_HOOKS,_OOB_PROP_UPDATE_HOOKS, _OOB_ATTR_UPDATE_HOOKS
# global _OOB_CUSTOM_UPDATE_HOOKS, _OOB_NDB_UPDATE_HOOKS
# if entity == "field" and name in _OOB_FIELD_UPDATE_HOOKS:
# del _OOB_FIELD_UPDATE_HOOKS[hid][name]
# elif entity == "property" and name in _OOB_PROP_UPDATE_HOOKS:
# del _OOB_PROP_UPDATE_HOOKS[hid][name]
# elif entity == "db" and name in _OOB_ATTR_UPDATE_HOOKS:
# del _OOB_ATTR_UPDATE_HOOKS[hid][name]
# elif entity == "ndb" and name in _OOB_NDB_UPDATE_HOOKS:
# del _OOB_NDB_UPDATE_HOOKS[hid][name]
# elif entity == "custom" and name in _OOB_CUSTOM_UPDATE_HOOKS:
# del _OOB_CUSTOM_UPDATE_HOOKS[hid][name]
# else:
# return None
#
#def call_ndb_hooks(obj, attrname, value):
# """
# No caching is done of ndb here, but
# we use this as a way to call OOB hooks.
# """
# hid = hashid(obj)
# if hid:
# oob_hook = _OOB_NDB_UPDATE_HOOKS[hid].get(attrname)
# if oob_hook:
# oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2])
#
#def call_custom_hooks(obj, attrname, value):
# """
# Custom handler for developers adding their own oob hooks, e.g. to
# custom typeclass properties.
# """
# hid = hashid(obj)
# if hid:
# oob_hook = _OOB_CUSTOM_UPDATE_HOOKS[hid].get(attrname)
# if oob_hook:
# oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2])
#
#
# # old cache system
#
# if _ENABLE_LOCAL_CACHES:
# # Cache stores
# _ATTR_CACHE = defaultdict(dict)
# _FIELD_CACHE = defaultdict(dict)
# _PROP_CACHE = defaultdict(dict)
#
#
# def get_cache_sizes():
# """
# Get cache sizes, expressed in number of objects and memory size in MB
# """
# global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE
#
# attr_n = sum(len(dic) for dic in _ATTR_CACHE.values())
# attr_mb = sum(sum(getsizeof(obj) for obj in dic.values()) for dic in _ATTR_CACHE.values()) / 1024.0
#
# field_n = sum(len(dic) for dic in _FIELD_CACHE.values())
# field_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0
#
# prop_n = sum(len(dic) for dic in _PROP_CACHE.values())
# prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0
#
# return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb)
#
# # on-object database field cache
# def get_field_cache(obj, name):
# "On-model Cache handler."
# global _FIELD_CACHE
# hid = hashid(obj)
# if hid:
# try:
# return _FIELD_CACHE[hid][name]
# except KeyError:
# val = _GA(obj, "db_%s" % name)
# _FIELD_CACHE[hid][name] = val
# return val
# return _GA(obj, "db_%s" % name)
#
# def set_field_cache(obj, name, val):
# "On-model Cache setter. Also updates database."
# _SA(obj, "db_%s" % name, val)
# _GA(obj, "save")()
# hid = hashid(obj)
# if hid:
# global _FIELD_CACHE
# _FIELD_CACHE[hid][name] = val
# # oob hook functionality
# if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
# _OOB_HANDLER.update(hid, name, val)
#
# def del_field_cache(obj, name):
# "On-model cache deleter"
# hid = hashid(obj)
# _SA(obj, "db_%s" % name, None)
# _GA(obj, "save")()
# if hid:
# try:
# del _FIELD_CACHE[hid][name]
# except KeyError:
# pass
# if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
# _OOB_HANDLER.update(hid, name, None)
#
# def flush_field_cache(obj=None):
# "On-model cache resetter"
# hid = hashid(obj)
# global _FIELD_CACHE
# if hid:
# try:
# del _FIELD_CACHE[hashid(obj)]
# except KeyError, e:
# pass
# else:
# # clean cache completely
# _FIELD_CACHE = defaultdict(dict)
#
# # on-object property cache (unrelated to database)
# # Note that the get/set_prop_cache handler do not actually
# # get/set the property "on" the object but only reads the
# # value to/from the cache. This is intended to be used
# # with a get/setter property on the object.
#
# def get_prop_cache(obj, name, default=None):
# "On-model Cache handler."
# global _PROP_CACHE
# hid = hashid(obj)
# if hid:
# try:
# val = _PROP_CACHE[hid][name]
# except KeyError:
# return default
# _PROP_CACHE[hid][name] = val
# return val
# return default
#
# def set_prop_cache(obj, name, val):
# "On-model Cache setter. Also updates database."
# hid = hashid(obj)
# if hid:
# global _PROP_CACHE
# _PROP_CACHE[hid][name] = val
# # oob hook functionality
# oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name)
# if oob_hook:
# oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2])
#
#
# def del_prop_cache(obj, name):
# "On-model cache deleter"
# try:
# del _PROP_CACHE[hashid(obj)][name]
# except KeyError:
# pass
# def flush_prop_cache(obj=None):
# "On-model cache resetter"
# hid = hashid(obj)
# global _PROP_CACHE
# if hid:
# try:
# del _PROP_CACHE[hid]
# except KeyError,e:
# pass
# else:
# # clean cache completely
# _PROP_CACHE = defaultdict(dict)
#
# # attribute cache
#
# def get_attr_cache(obj, attrname):
# """
# Attribute cache store
# """
# return _ATTR_CACHE[hashid(obj)].get(attrname, None)
#
# def set_attr_cache(obj, attrname, attrobj):
# """
# Cache an attribute object
# """
# hid = hashid(obj)
# if hid:
# global _ATTR_CACHE
# _ATTR_CACHE[hid][attrname] = attrobj
# # oob hook functionality
# oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname)
# if oob_hook:
# oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2])
#
# def del_attr_cache(obj, attrname):
# """
# Remove attribute from cache
# """
# global _ATTR_CACHE
# try:
# _ATTR_CACHE[hashid(obj)][attrname].no_cache = True
# del _ATTR_CACHE[hashid(obj)][attrname]
# except KeyError:
# pass
#
# def flush_attr_cache(obj=None):
# """
# Flush the attribute cache for this object.
# """
# global _ATTR_CACHE
# if obj:
# for attrobj in _ATTR_CACHE[hashid(obj)].values():
# attrobj.no_cache = True
# del _ATTR_CACHE[hashid(obj)]
# else:
# # clean cache completely
# for objcache in _ATTR_CACHE.values():
# for attrobj in objcache.values():
# attrobj.no_cache = True
# _ATTR_CACHE = defaultdict(dict)
#
#
# def flush_obj_caches(obj=None):
# "Clean all caches on this object"
# flush_field_cache(obj)
# flush_prop_cache(obj)
# flush_attr_cache(obj)
#
#else:
# local caches disabled. Use simple pass-through replacements
def get_cache_sizes():
return (0, 0), (0, 0), (0, 0)
def get_field_cache(obj, name):
return _GA(obj, "db_%s" % name)
def set_field_cache(obj, name, val):
_SA(obj, "db_%s" % name, val)
_GA(obj, "save")()
hid = hashid(obj)
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
_OOB_HANDLER.update(hid, name, val)
def del_field_cache(obj, name):
_SA(obj, "db_%s" % name, None)
_GA(obj, "save")()
hid = hashid(obj)
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
_OOB_HANDLER.update(hid, name, None)
def flush_field_cache(obj=None):
pass
# these should get oob handlers when oob is implemented.
def get_prop_cache(obj, name, default=None):
return None
def set_prop_cache(obj, name, val):
pass
def del_prop_cache(obj, name):
pass
def flush_prop_cache(obj=None):
pass
def get_attr_cache(obj, attrname):
return None
def set_attr_cache(obj, attrname, attrobj):
pass
def del_attr_cache(obj, attrname):
pass
def flush_attr_cache(obj=None):
pass
def get_cache_sizes():
    """
    Report cache sizes as ((n, MB), ...) for attr/field/prop caches.
    Local caching is disabled, so every cache reports as empty.
    """
    empty = (0, 0)
    return empty, empty, empty
def get_field_cache(obj, name):
    """
    Pass-through field getter (no caching): read the db_<name>
    field directly off the model instance.
    """
    return _GA(obj, "db_" + name)
def set_field_cache(obj, name, val):
    """
    Pass-through field setter (no caching): write val to the
    db_<name> field and immediately persist the model.
    """
    fieldname = "db_" + name
    _SA(obj, fieldname, val)
    _GA(obj, "save")()
    # OOB update hooks will be triggered here once OOB is implemented.
def del_field_cache(obj, name):
    """
    Pass-through field deleter (no caching): clear the db_<name>
    field by setting it to None, then persist the model.
    """
    fieldname = "db_" + name
    _SA(obj, fieldname, None)
    _GA(obj, "save")()
    # OOB update hooks will be triggered here once OOB is implemented.
def flush_field_cache(obj=None):
    """No-op: there is no local field cache to flush."""
    return None
# these should get oob handlers when oob is implemented.
def get_prop_cache(obj, name, default=None):
    """
    Pass-through property-cache getter (caching disabled).

    With no cache in play every lookup is a miss, so return the
    caller-supplied `default` — mirroring the contract of the
    cached implementation, which returns `default` on a miss.
    The original pass-through ignored `default` and always
    returned None, silently dropping caller-supplied defaults.
    """
    return default
def set_prop_cache(obj, name, val):
    """No-op: property caching is disabled."""
    return None
def del_prop_cache(obj, name):
    """No-op: property caching is disabled, nothing to delete."""
    return None
def flush_prop_cache(obj=None):
    """No-op: there is no local property cache to flush."""
    return None
def get_attr_cache(obj, attrname):
    """Attribute caching is disabled; always report a cache miss (None)."""
    return None
def set_attr_cache(obj, attrname, attrobj):
    """No-op: attribute caching is disabled."""
    return None
def del_attr_cache(obj, attrname):
    """No-op: attribute caching is disabled, nothing to delete."""
    return None
def flush_attr_cache(obj=None):
    """No-op: there is no local attribute cache to flush."""
    return None

View file

@ -30,6 +30,12 @@ from src.utils.utils import get_evennia_version, mod_import, make_iter
from src.comms import channelhandler
from src.server.sessionhandler import SESSIONS
# setting up server-side field cache
from django.db.models.signals import pre_save
from src.server.caches import field_pre_save
pre_save.connect(field_pre_save, dispatch_uid="fieldcache")
_SA = object.__setattr__
if os.name == 'nt':

View file

@ -159,14 +159,23 @@ DATABASES = {
'HOST':'',
'PORT':''
}}
# Engine Config style for Django versions < 1.2 only. See above.
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(GAME_DIR, 'evennia.db3')
DATABASE_USER = ''
DATABASE_PASSWORD = ''
DATABASE_HOST = ''
DATABASE_PORT = ''
# This manages the object-level caches. Evennia will aggressively cache
# fields, properties and attribute lookup. Evennia uses a fast and
# local in-memory cache by default. If a Memcached server is available
# it can be used instead (see django docs). Cache performance can be
# tweaked by adding options to each cache. Finally, any cache can
# be completely turned off by pointing its backend
# to 'django.core.cache.backends.dummy.DummyCache'.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'field_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'prop_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'attr_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
}
######################################################################
# Evennia pluggable modules
######################################################################

View file

@ -38,11 +38,12 @@ from django.db import models, IntegrityError
from django.conf import settings
from django.utils.encoding import smart_str
from django.contrib.contenttypes.models import ContentType
from django.db.models.fields import AutoField, FieldDoesNotExist
from src.utils.idmapper.models import SharedMemoryModel
from src.server.caches import get_field_cache, set_field_cache, del_field_cache
from src.server.caches import get_attr_cache, set_attr_cache, del_attr_cache
from src.server.caches import get_prop_cache, set_prop_cache, del_prop_cache, flush_attr_cache
from src.server.caches import call_ndb_hooks
#from src.server.caches import call_ndb_hooks
from src.server.models import ServerConfig
from src.typeclasses import managers
from src.locks.lockhandler import LockHandler
@ -111,7 +112,7 @@ class Attribute(SharedMemoryModel):
# Lock storage
db_lock_storage = models.TextField('locks', blank=True)
# references the object the attribute is linked to (this is set
# by each child class to this abstact class)
# by each child class to this abstract class)
db_obj = None # models.ForeignKey("RefencedObject")
# time stamp
db_date_created = models.DateTimeField('date_created', editable=False, auto_now_add=True)
@ -455,20 +456,21 @@ class TypedObject(SharedMemoryModel):
# value = self.attr and del self.attr respectively (where self
# is the object in question).
# key property (wraps db_key)
#@property
def __key_get(self):
"Getter. Allows for value = self.key"
return get_field_cache(self, "key")
#@key.setter
def __key_set(self, value):
"Setter. Allows for self.key = value"
set_field_cache(self, "key", value)
#@key.deleter
def __key_del(self):
"Deleter. Allows for del self.key"
raise Exception("Cannot delete objectdb key!")
key = property(__key_get, __key_set, __key_del)
#def __key_get(self):
# "Getter. Allows for value = self.key"
# return get_field_cache(self, "key")
##@key.setter
#def __key_set(self, value):
# "Setter. Allows for self.key = value"
# set_field_cache(self, "key", value)
##@key.deleter
#def __key_del(self):
# "Deleter. Allows for del self.key"
# raise Exception("Cannot delete objectdb key!")
#key = property(__key_get, __key_set, __key_del)
# name property (wraps db_key too - alias to self.key)
#@property
@ -1244,7 +1246,7 @@ class TypedObject(SharedMemoryModel):
return None
def __setattr__(self, key, value):
# hook the oob handler here
call_ndb_hooks(self, key, value)
#call_ndb_hooks(self, key, value)
_SA(self, key, value)
self._ndb_holder = NdbHolder()
return self._ndb_holder

View file

@ -8,14 +8,19 @@ Also adds cache_size() for monitoring the size of the cache.
"""
import os, threading
from twisted.internet import reactor
#from twisted.internet import reactor
#from twisted.internet.threads import blockingCallFromThread
from twisted.internet.reactor import callFromThread
from twisted.internet.threads import blockingCallFromThread
from django.db.models.base import Model, ModelBase
from django.db.models.signals import post_save, pre_delete, post_syncdb
from manager import SharedMemoryManager
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
# determine if our current pid is different from the server PID (i.e.
# if we are in a subprocess or not)
from src import PROC_MODIFIED_OBJS
@ -78,13 +83,39 @@ class SharedMemoryModelBase(ModelBase):
if cached_instance is None:
cached_instance = new_instance()
cls.cache_instance(cached_instance)
return cached_instance
def _prepare(cls):
cls.__instance_cache__ = {} #WeakValueDictionary()
super(SharedMemoryModelBase, cls)._prepare()
def __init__(cls, *args, **kwargs):
"Takes field names db_* and creates property wrappers named without the db_ prefix. So db_key -> key"
super(SharedMemoryModelBase, cls).__init__(*args, **kwargs)
# the factory function exists so each generated property closes over its
# own fieldname; defining the lambdas inline in the loop would make every
# wrapper see only the loop's final fieldname (late-binding closure).
def create_wrapper(cls, fieldname, wrappername):
"Helper method to create property wrappers with unique names (must be in separate call)"
def _get(cls, fname):
# raw attribute read, bypassing any overridden __getattribute__
return _GA(cls, fname)
def _set(cls, fname, value):
# write the field, then persist immediately; update_fields tells
# save() exactly which field changed (presumably so the pre_save
# signal handler can limit its work — TODO confirm against caller)
_SA(cls, fname, value)
_GA(cls, "save")(update_fields=[fname]) # important!
def _del(cls, fname):
# fields are never deleted through the wrapper, only nulled
raise RuntimeError("You cannot delete field %s on %s; set it to None instead." % (fname, cls))
# install the property on the model class itself (cls is a class here,
# so type(cls) is the metaclass)
type(cls).__setattr__(cls, wrappername, property(lambda cls: _get(cls, fieldname),
lambda cls,val: _set(cls, fieldname, val),
lambda cls: _del(cls, fieldname)))
# exclude some models that should not auto-create wrapper fields
if cls.__name__ in ("ServerConfig", "TypeNick"):
return
# dynamically create the properties
for field in cls._meta.fields:
fieldname = field.name
# "id" gets the special name "dbref"; otherwise strip the db_ prefix
wrappername = fieldname == "id" and "dbref" or fieldname.replace("db_", "")
if not hasattr(cls, wrappername):
# make sure not to overload manually created wrappers on the model
# NOTE(review): debug print left in — consider removing or logging
print "wrapping %s -> %s" % (fieldname, wrappername)
create_wrapper(cls, fieldname, wrappername)
class SharedMemoryModel(Model):
# CL: setting abstract correctly to allow subclasses to inherit the default
@ -126,6 +157,13 @@ class SharedMemoryModel(Model):
return result
_get_cache_key = classmethod(_get_cache_key)
def _flush_cached_by_key(cls, key):
try:
del cls.__instance_cache__[key]
except KeyError:
pass
_flush_cached_by_key = classmethod(_flush_cached_by_key)
def get_cached_instance(cls, id):
"""
Method to retrieve a cached instance by pk value. Returns None when not found
@ -148,13 +186,6 @@ class SharedMemoryModel(Model):
return cls.__instance_cache__.values()
get_all_cached_instances = classmethod(get_all_cached_instances)
def _flush_cached_by_key(cls, key):
try:
del cls.__instance_cache__[key]
except KeyError:
pass
_flush_cached_by_key = classmethod(_flush_cached_by_key)
def flush_cached_instance(cls, instance):
"""
Method to flush an instance from the cache. The instance will always be flushed from the cache,
@ -168,7 +199,7 @@ class SharedMemoryModel(Model):
flush_instance_cache = classmethod(flush_instance_cache)
def save(cls, *args, **kwargs):
"save tracking process/thread issues"
"save method tracking process/thread issues"
if _IS_SUBPROCESS:
# we keep a store of objects modified in subprocesses so
@ -210,8 +241,6 @@ def update_cached_instance(sender, instance, **kwargs):
if not hasattr(instance, 'cache_instance'):
return
sender.cache_instance(instance)
from src.server.caches import flush_obj_caches
flush_obj_caches(instance)
post_save.connect(update_cached_instance)
def cache_size(mb=True):