Removed extra caching for fields (this slowed things down over normal django field caching). Considering reworking Attribute storage in order to make use of those caches as well.

This commit is contained in:
Griatch 2013-06-06 12:45:39 +02:00
parent 7351aacba5
commit a0a94df83d
3 changed files with 66 additions and 31 deletions

View file

@@ -18,13 +18,14 @@ _DA = object.__delattr__
# Open handles to the caches
#
_FIELD_CACHE = get_cache("field_cache")
_ATTR_CACHE = get_cache("attr_cache")
#_FIELD_CACHE = get_cache("field_cache")
_ATTR_CACHE = {}
#_ATTR_CACHE = get_cache("attr_cache")
#_PROP_CACHE = get_cache("prop_cache")
_PROP_CACHE = defaultdict(dict)
# make sure caches are empty at startup
_FIELD_CACHE.clear()
#_FIELD_CACHE.clear()
_ATTR_CACHE.clear()
#_PROP_CACHE.clear()
@@ -93,10 +94,10 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg
print "field_pre_save:", _GA(instance, "db_key") if hasattr(instance, "db_key") else instance, update_fields
if update_fields:
# this is a list of strings at this point. We want field objects
update_fields = (instance._meta.get_field_by_name(field)[0] for field in update_fields)
update_fields = (_GA(_GA(instance, "_meta"), "get_field_by_name")(field)[0] for field in update_fields)
else:
# meta.fields are already field objects
update_fields = instance._meta.fields
update_fields = _GA(_GA(instance, "_meta"), "fields")
for field in update_fields:
fieldname = field.name
new_value = field.value_from_object(instance)
@@ -105,23 +106,33 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg
handler = _GA(instance, handlername)
except AttributeError:
handler = None
hid = hashid(instance, "-%s" % fieldname)
#hid = hashid(instance, "-%s" % fieldname)
if callable(handler):
old_value = _FIELD_CACHE.get(hid) if hid else None
old_value = _GA(instance, _GA(field, "get_cache_name")())#_FIELD_CACHE.get(hid) if hid else None
# the handler may modify the stored value in various ways
# don't catch exceptions, the handler must work!
new_value = handler(new_value, old_value=old_value)
# we re-assign this to the field, save() will pick it up from there
_SA(instance, fieldname, new_value)
if hid:
# update cache
_FIELD_CACHE.set(hid, new_value)
#if hid:
# # update cache
# _FIELD_CACHE.set(hid, new_value)
# access method
def flush_field_cache():
"Clear the field cache"
_FIELD_CACHE.clear()
#
#def get_field_cache(obj, fieldname):
# "Called by _get wrapper"
# hid = hashid(obj, "-%s" % fieldname)
# return hid and _FIELD_CACHE.get(hid, None) or None
#
#def set_field_cache(obj, fieldname, value):
#    hid = hashid(obj, "-%s" % fieldname)
# if hid:
# _FIELD_CACHE.set(hid, value)
#
#def flush_field_cache():
# "Clear the field cache"
# _FIELD_CACHE.clear()
#------------------------------------------------------------
@@ -136,7 +147,9 @@ def attr_post_init(sender, instance=None, **kwargs):
#print "attr_post_init:", instance, instance.db_obj, instance.db_key
hid = hashid(_GA(instance, "db_obj"), "-%s" % _GA(instance, "db_key"))
if hid:
_ATTR_CACHE.set(hid, sender)
global _ATTR_CACHE
_ATTR_CACHE[hid] = sender
#_ATTR_CACHE.set(hid, sender)
# connected to pre_delete signal (connected in respective Attribute model)
def attr_pre_delete(sender, instance=None, **kwargs):
@@ -145,14 +158,15 @@ def attr_pre_delete(sender, instance=None, **kwargs):
hid = hashid(_GA(instance, "db_obj"), "-%s" % _GA(instance, "db_key"))
if hid:
#print "attr_pre_delete:", _GA(instance, "db_key")
_ATTR_CACHE.delete(hid)
global _ATTR_CACHE
del _ATTR_CACHE[hid]
#_ATTR_CACHE.delete(hid)
# access methods
def get_attr_cache(obj, attrname):
"Called by get_attribute"
hid = hashid(obj, "-%s" % attrname)
_ATTR_CACHE.delete(hid)
return hid and _ATTR_CACHE.get(hid, None) or None
def set_attr_cache(attrobj):
@@ -161,7 +175,9 @@ def set_attr_cache(attrobj):
def flush_attr_cache():
"Clear attribute cache"
_ATTR_CACHE.clear()
global _ATTR_CACHE
_ATTR_CACHE = {}
#_ATTR_CACHE.clear()
#------------------------------------------------------------
# Property cache - this is a generic cache for properties stored on models.

View file

@@ -426,6 +426,8 @@ class TypedObject(SharedMemoryModel):
# Lock storage
db_lock_storage = models.TextField('locks', blank=True, help_text="locks limit access to an entity. A lock is defined as a 'lock string' on the form 'type:lockfunctions', defining what functionality is locked and how to determine access. Not defining a lock means no access is granted.")
#db_attributes = models.ManyToManyField(Attribute, related_name="%(app_label)s_%(class)s_related")
# Database manager
objects = managers.TypedObjectManager()
@@ -460,7 +462,8 @@ class TypedObject(SharedMemoryModel):
#@property
#def __key_get(self):
# "Getter. Allows for value = self.key"
# return get_field_cache(self, "key")
# return _GA(self, "db_key")
# #return get_field_cache(self, "key")
##@key.setter
#def __key_set(self, value):
# "Setter. Allows for self.key = value"
@@ -495,7 +498,8 @@ class TypedObject(SharedMemoryModel):
def __typeclass_path_set(self, value):
"Setter. Allows for self.typeclass_path = value"
_SA(self, "db_typeclass_path", value)
_GA(self, "save")(update_fields=["db_typeclass_path"])
update_fields = ["db_typeclass_path"] if _GA(self, "_get_pk_val")(_GA(self, "_meta")) is not None else None
_GA(self, "save")(update_fields=update_fields)
#@typeclass_path.deleter
def __typeclass_path_del(self):
"Deleter. Allows for del self.typeclass_path"
@@ -587,6 +591,9 @@ class TypedObject(SharedMemoryModel):
try:
return _GA(self, propname)
except AttributeError:
if propname.startswith('_'):
# don't relay private/special varname lookups to the typeclass
raise AttributeError("private property %s not found on db model (typeclass not searched)." % propname)
# check if the attribute exists on the typeclass instead
# (we make sure to not incur a loop by not triggering the
# typeclass' __getattribute__, since that one would

View file

@@ -18,6 +18,8 @@ from src.utils.utils import dbref
from manager import SharedMemoryManager
_FIELD_CACHE_GET = None
_FIELD_CACHE_SET = None
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
@@ -105,10 +107,12 @@ class SharedMemoryModelBase(ModelBase):
"Helper method to create property wrappers with unique names (must be in separate call)"
def _get(cls, fname):
"Wrapper for getting database field"
value = _GA(cls, fname)
if hasattr(value, "typeclass"):
return _GA(value, "typeclass")
#print "_get wrapper:", fname, value, type(value)
value = _GA(cls, fieldname)
if isinstance(value, (basestring, int, float, bool)):
return value
elif hasattr(value, "typeclass"):
return _GA(value, "typeclass")
return value
def _set(cls, fname, value):
"Wrapper for setting database field"
@@ -126,16 +130,24 @@ class SharedMemoryModelBase(ModelBase):
err = "Could not set %s. Tried to treat value '%s' as a dbref, but no matching object with that id was found."
err = err % (fname, value)
raise ObjectDoesNotExist(err)
print "_set wrapper:", fname, value, type(value)
#print "_set wrapper:", fname, value, type(value), cls._get_pk_val(cls._meta)
_SA(cls, fname, value)
_GA(cls, "save")(update_fields=[fname]) # important - this saves one field only
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _del(cls, fname):
"Wrapper for clearing database field"
raise RuntimeError("You cannot delete field %s on %s; set it to None instead." % (fname, cls))
type(cls).__setattr__(cls, wrappername, property(fget=lambda cls: _get(cls, fieldname),
fset=lambda cls,val: _set(cls, fieldname, val),
fdel=lambda cls: _del(cls, fieldname),
doc="Wraps setting, saving and caching the %s field." % fieldname))
"Wrapper for clearing database field - sets it to None"
_SA(cls, fname, None)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
# create class wrappers
fget = lambda cls: _get(cls, fieldname)
fset = lambda cls, val: _set(cls, fieldname, val)
fdel = lambda cls: _del(cls, fieldname)
doc = "Wraps setting, saving and deleting the %s field." % fieldname
type(cls).__setattr__(cls, wrappername, property(fget, fset, fdel, doc))
# exclude some models that should not auto-create wrapper fields
if cls.__name__ in ("ServerConfig", "TypeNick"):
return