Activated propcache with the new cache system. It is still not functioning correctly; the content cache also still needs to be handled.

This commit is contained in:
Griatch 2013-05-29 23:07:44 +02:00
parent b6383ddab9
commit fb3259be8c
4 changed files with 87 additions and 66 deletions

View file

@ -65,7 +65,7 @@ class ObjAttribute(Attribute):
verbose_name = "Object Attribute"
verbose_name_plural = "Object Attributes"
# attach the cache handlers for attribute lookup
# attach the cache handlers
post_init.connect(attr_post_init, sender=ObjAttribute, dispatch_uid="objattrcache")
pre_delete.connect(attr_pre_delete, sender=ObjAttribute, dispatch_uid="objattrcache")
@ -248,7 +248,7 @@ class ObjectDB(TypedObject):
"Deleter. Allows for del self.aliases"
for alias in Alias.objects.filter(db_obj=self):
alias.delete()
del_prop_cache(self, "_aliases")
#del_prop_cache(self, "_aliases")
aliases = property(__aliases_get, __aliases_set, __aliases_del)
# player property (wraps db_player)

View file

@ -2,6 +2,7 @@
Central caching module.
"""
from django.dispatch import Signal
from django.core.cache import get_cache
#from django.db.models.signals import pre_save, pre_delete, post_init
from src.server.models import ServerConfig
@ -17,14 +18,16 @@ _DA = object.__delattr__
_FIELD_CACHE = get_cache("field_cache")
_ATTR_CACHE = get_cache("attr_cache")
_PROP_CACHE = get_cache("prop_cache")
# make sure caches are empty at startup
_FIELD_CACHE.clear()
_ATTR_CACHE.clear()
_PROP_CACHE.clear()
#
#------------------------------------------------------------
# Cache key hash generation
#
#------------------------------------------------------------
if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4':
# MySQL < 5.6.4 does not support millisecond precision
@ -58,19 +61,23 @@ def hashid(obj, suffix=""):
if not idnum or not date:
# this will happen if setting properties on an object which is not yet saved
return None
# build the hashid
hid = "%s-%s-#%s%s" % (_GA(obj, "__class__"), date, idnum, suffix)
hid = hid.replace(" ", "")
hid = "%s-%s-#%s" % (_GA(obj, "__class__"), date, idnum)
hid = hid.replace(" ", "") # we have to remove the class-name's space, for memcached's sake
# we cache the object part of the hashid to avoid too many object lookups
_SA(obj, "_hashid", hid)
# build the complete hashid
hid = "%s%s" % (hid, suffix)
return to_str(hid)
#
#------------------------------------------------------------
# Cache callback handlers
#
#------------------------------------------------------------
#------------------------------------------------------------
# Field cache - makes sure to cache all database fields when
# they are saved, no matter from where.
#------------------------------------------------------------
# callback to pre_save signal (connected in src.server.server)
def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwargs):
@ -106,9 +113,18 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg
# update cache
_FIELD_CACHE.set(hid, new_value)
# access method
def flush_field_cache():
    "Empty the field cache completely"
    _FIELD_CACHE.clear()
#------------------------------------------------------------
# Attr cache - caches the Attribute objects related to a given object to
# avoid more database lookups than necessary (this makes Attributes on par
# in speed with any property).
#------------------------------------------------------------
# connected to post_init signal (connected in respective Attribute model)
def attr_post_init(sender, instance=None, **kwargs):
@ -117,6 +133,7 @@ def attr_post_init(sender, instance=None, **kwargs):
hid = hashid(_GA(instance, "db_obj"), "-%s" % _GA(instance, "db_key"))
if hid:
_ATTR_CACHE.set(hid, sender)
# connected to pre_delete signal (connected in respective Attribute model)
def attr_pre_delete(sender, instance=None, **kwargs):
"Called when attribute is deleted (del_attribute)"
@ -125,42 +142,52 @@ def attr_pre_delete(sender, instance=None, **kwargs):
if hid:
#print "attr_pre_delete:", _GA(instance, "db_key")
_ATTR_CACHE.delete(hid)
# access method
# access methods
def get_attr_cache(obj, attrname):
    """
    Retrieve a cached Attribute object for obj, or None.

    Called by get_attribute. Returns None when the object has no
    valid hashid (e.g. it is not yet saved to the database) or on
    a cache miss.
    """
    hid = hashid(obj, "-%s" % attrname)
    # NOTE: the previous version called _ATTR_CACHE.delete(hid) here,
    # emptying the entry before every read so the cache could never hit
    # (presumably leftover debugging); removed so caching works again.
    if hid:
        return _ATTR_CACHE.get(hid, None)
    return None
def set_attr_cache(attrobj):
    "Manually (re)cache an Attribute object; delegates to the post_init handler"
    attr_post_init(None, instance=attrobj)
def flush_attr_cache():
    "Empty the attribute cache completely"
    _ATTR_CACHE.clear()
## property cache - this doubles as a central cache and as a way
## to trigger oob on such changes.
#
#from django.dispatch import Signal
#_PROP_CACHE = get_cache("prop_cache")
#if not _PROP_CACHE:
# raise RuntimeError("settings.CACHE does not contain a 'prop_cache' entry!")
#
#PROP_POST_UPDATE = Signal(providing_args=["propname", "propvalue"])
#
#def prop_update(sender, **kwargs):
# "Called when a propery is updated. kwargs are propname and propvalue."
# propname, propvalue = kwargs.pop("propname", None), kwargs.pop("propvalue", None)
# if propname == None: return
# hid = hashid(sender, "-%s" % propname)
# _PROP_CACHE.set(hid, propvalue)
#
#PROP_POST_UPDATE.connect(prop_update, dispatch_uid="propcache")
#
#
#------------------------------------------------------------
# Property cache - this is a generic cache for properties stored on models.
#------------------------------------------------------------
# access methods
def get_prop_cache(obj, propname):
    "Fetch a cached property value; returns None on a miss or for unsaved objects"
    hid = hashid(obj, "-%s" % propname)
    if not hid:
        return None
    return _PROP_CACHE.get(hid, None)
def set_prop_cache(obj, propname, propvalue):
    "Store a property value in the cache (a no-op for unsaved objects)"
    hid = hashid(obj, "-%s" % propname)
    if not hid:
        return
    _PROP_CACHE.set(hid, propvalue)
def del_prop_cache(obj, propname):
    "Remove a single property entry from the cache, if present"
    hid = hashid(obj, "-%s" % propname)
    if not hid:
        return
    _PROP_CACHE.delete(hid)
def flush_prop_cache():
    "Empty the property cache completely"
    _PROP_CACHE.clear()
#_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE
@ -448,23 +475,23 @@ def del_field_cache(obj, name):
#hid = hashid(obj)
#if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
# _OOB_HANDLER.update(hid, name, None)
def flush_field_cache(obj=None):
pass
#def flush_field_cache(obj=None):
# pass
# these should get oob handlers when oob is implemented.
def get_prop_cache(obj, name, default=None):
return None
def set_prop_cache(obj, name, val):
pass
def del_prop_cache(obj, name):
pass
def flush_prop_cache(obj=None):
pass
#def get_prop_cache(obj, name, default=None):
# return None
#def set_prop_cache(obj, name, val):
# pass
#def del_prop_cache(obj, name):
# pass
#def flush_prop_cache(obj=None):
# pass
#def get_attr_cache(obj, attrname):
# return None
def set_attr_cache(obj, attrname, attrobj):
pass
def del_attr_cache(obj, attrname):
pass
def flush_attr_cache(obj=None):
pass
#def set_attr_cache(obj, attrname, attrobj):
# pass
#def del_attr_cache(obj, attrname):
# pass
#def flush_attr_cache(obj=None):
# pass

View file

@ -41,7 +41,7 @@ from django.contrib.contenttypes.models import ContentType
from django.db.models.fields import AutoField, FieldDoesNotExist
from src.utils.idmapper.models import SharedMemoryModel
from src.server.caches import get_field_cache, set_field_cache, del_field_cache
from src.server.caches import get_attr_cache, set_attr_cache, del_attr_cache
from src.server.caches import get_attr_cache, set_attr_cache
from src.server.caches import get_prop_cache, set_prop_cache, del_prop_cache, flush_attr_cache
#from src.server.caches import call_ndb_hooks
from src.server.models import ServerConfig
@ -60,8 +60,6 @@ _CTYPEGET = ContentType.objects.get
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
#_PLOADS = pickle.loads
#_PDUMPS = pickle.dumps
#------------------------------------------------------------
#
@ -933,11 +931,10 @@ class TypedObject(SharedMemoryModel):
if not get_attr_cache(self, attribute_name):
attrib_obj = _GA(self, "_attribute_class").objects.filter(
db_obj=self, db_key__iexact=attribute_name)
if not attrib_obj:
if attrib_obj:
set_attr_cache(attrib_obj[0])
else:
return False
#set_attr_cache(self, attribute_name, attrib_obj[0])
#else:
# return False
return True
def set_attribute(self, attribute_name, new_value=None, lockstring=""):
@ -954,7 +951,6 @@ class TypedObject(SharedMemoryModel):
types checked by secureattr are 'attrread','attredit','attrcreate'.
"""
attrib_obj = get_attr_cache(self, attribute_name)
print "set_attribute:", attribute_name, attrib_obj
if not attrib_obj:
attrclass = _GA(self, "_attribute_class")
# check if attribute already exists.
@ -963,8 +959,9 @@ class TypedObject(SharedMemoryModel):
if attrib_obj:
# use old attribute
attrib_obj = attrib_obj[0]
set_attr_cache(attrib_obj) # renew cache
else:
# no match; create new attribute
# no match; create new attribute (this will cache automatically)
attrib_obj = attrclass(db_key=attribute_name, db_obj=self)
if lockstring:
attrib_obj.locks.add(lockstring)
@ -977,7 +974,6 @@ class TypedObject(SharedMemoryModel):
flush_attr_cache(self)
self.delete()
raise IntegrityError("Attribute could not be saved - object %s was deleted from database." % self.key)
#set_attr_cache(self, attribute_name, attrib_obj)
def get_attribute_obj(self, attribute_name, default=None):
"""
@ -989,7 +985,7 @@ class TypedObject(SharedMemoryModel):
db_obj=self, db_key__iexact=attribute_name)
if not attrib_obj:
return default
#set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here
set_attr_cache(attrib_obj[0]) #query is first evaluated here
return attrib_obj[0]
return attrib_obj
@ -1008,7 +1004,7 @@ class TypedObject(SharedMemoryModel):
db_obj=self, db_key__iexact=attribute_name)
if not attrib_obj:
return default
#set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here
set_attr_cache(attrib_obj[0]) #query is first evaluated here
return attrib_obj[0].value
return attrib_obj.value
@ -1025,7 +1021,7 @@ class TypedObject(SharedMemoryModel):
db_obj=self, db_key__iexact=attribute_name)
if not attrib_obj:
raise AttributeError
#set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here
set_attr_cache(attrib_obj[0]) #query is first evaluated here
return attrib_obj[0].value
return attrib_obj.value
@ -1037,8 +1033,7 @@ class TypedObject(SharedMemoryModel):
"""
attr_obj = get_attr_cache(self, attribute_name)
if attr_obj:
del_attr_cache(self, attribute_name)
attr_obj.delete()
attr_obj.delete() # this will clear attr cache automatically
else:
try:
_GA(self, "_attribute_class").objects.filter(
@ -1055,8 +1050,7 @@ class TypedObject(SharedMemoryModel):
"""
attr_obj = get_attr_cache(self, attribute_name)
if attr_obj:
del_attr_cache(self, attribute_name)
attr_obj.delete()
attr_obj.delete() # this will clear attr cache automatically
else:
try:
_GA(self, "_attribute_class").objects.filter(

View file

@ -116,7 +116,7 @@ class SharedMemoryModelBase(ModelBase):
# dynamically create the properties
for field in cls._meta.fields:
fieldname = field.name
wrappername = fieldname == "id" and "dbref" or fieldname.replace("db_", "")
wrappername = fieldname == "id" and "dbid" or fieldname.replace("db_", "")
if not hasattr(cls, wrappername):
# make sure not to overload manually created wrappers on the model
#print "wrapping %s -> %s" % (fieldname, wrappername)