mirror of
https://github.com/evennia/evennia.git
synced 2026-03-16 21:06:30 +01:00
Added the ability to deactivate local caching via the settings.GAME_CACHE_TYPE variable. This is a temporary solution for working with multiple processes; it will also be useful for debugging and profiling.
This commit is contained in:
parent
15feb915e6
commit
ea545d7ff3
4 changed files with 218 additions and 157 deletions
|
|
@ -21,6 +21,7 @@ from src.typeclasses.typeclass import TypeClass
|
|||
from src.commands import cmdset, command
|
||||
from src.comms.models import Channel
|
||||
from src.utils import logger
|
||||
|
||||
__all__ = ("Object", "Character", "Room", "Exit")
|
||||
|
||||
_GA = object.__getattribute__
|
||||
|
|
|
|||
|
|
@ -5,16 +5,14 @@ Central caching module.
|
|||
|
||||
from sys import getsizeof
|
||||
from collections import defaultdict
|
||||
from django.conf import settings
|
||||
|
||||
_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE
|
||||
|
||||
_GA = object.__getattribute__
|
||||
_SA = object.__setattr__
|
||||
_DA = object.__delattr__
|
||||
|
||||
# Cache stores
|
||||
_ATTR_CACHE = defaultdict(dict)
|
||||
_FIELD_CACHE = defaultdict(dict)
|
||||
_PROP_CACHE = defaultdict(dict)
|
||||
|
||||
# OOB hooks (OOB not yet functional, don't use yet)
|
||||
_OOB_FIELD_UPDATE_HOOKS = defaultdict(dict)
|
||||
_OOB_PROP_UPDATE_HOOKS = defaultdict(dict)
|
||||
|
|
@ -24,23 +22,6 @@ _OOB_CUSTOM_UPDATE_HOOKS = defaultdict(dict)
|
|||
|
||||
_OOB_HANDLER = None # set by oob handler when it initializes
|
||||
|
||||
def get_cache_sizes():
    """
    Measure the current cache stores.

    Returns:
        Three 2-tuples ``(n_objects, size_kib)`` for the attribute,
        field and property caches respectively.

    Notes:
        Sizes are shallow - ``sys.getsizeof`` does not follow references
        into contained objects - and dividing byte totals by 1024.0
        yields KiB, not MB as the old docstring claimed.
    """
    global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE

    attr_n = sum(len(dic) for dic in _ATTR_CACHE.values())
    attr_mb = sum(sum(getsizeof(obj) for obj in dic.values())
                  for dic in _ATTR_CACHE.values()) / 1024.0

    field_n = sum(len(dic) for dic in _FIELD_CACHE.values())
    field_mb = sum(sum(getsizeof(obj) for obj in dic.values())
                   for dic in _FIELD_CACHE.values()) / 1024.0

    prop_n = sum(len(dic) for dic in _PROP_CACHE.values())
    prop_mb = sum(sum(getsizeof(obj) for obj in dic.values())
                  for dic in _PROP_CACHE.values()) / 1024.0

    return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb)
|
||||
|
||||
def hashid(obj):
|
||||
"""
|
||||
Returns a per-class unique that combines the object's
|
||||
|
|
@ -117,141 +98,6 @@ def unregister_oob_update_hook(obj, name, entity="property"):
|
|||
else:
|
||||
return None
|
||||
|
||||
# on-object database field cache
|
||||
def get_field_cache(obj, name):
    """Read database field ``db_<name>`` from obj through the cache.

    On a cache miss the value is fetched from the model field and stored
    for later lookups; objects without a valid hashid bypass the cache
    and hit the model field directly.
    """
    global _FIELD_CACHE
    hid = hashid(obj)
    if not hid:
        # uncacheable object - read the model field directly
        return _GA(obj, "db_%s" % name)
    objcache = _FIELD_CACHE[hid]
    if name in objcache:
        return objcache[name]
    fieldval = _GA(obj, "db_%s" % name)
    objcache[name] = fieldval
    return fieldval
|
||||
|
||||
def set_field_cache(obj, name, val):
    """Write ``val`` to database field ``db_<name>`` and mirror it in cache.

    The model is saved immediately. If the object is cacheable, the new
    value is stored in _FIELD_CACHE and any registered OOB field-update
    hook is notified.
    """
    fieldname = "db_%s" % name
    _SA(obj, fieldname, val)
    _GA(obj, "save")()
    hid = hashid(obj)
    if not hid:
        return
    global _FIELD_CACHE
    _FIELD_CACHE[hid][name] = val
    # oob hook functionality (OOB not yet functional)
    # NOTE(review): assumes _OOB_HANDLER was initialized before any field
    # hook is registered - confirm, else this raises AttributeError on None.
    if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
        _OOB_HANDLER.update(hid, name, val)
|
||||
|
||||
def del_field_cache(obj, name):
    """Drop the cached copy of field ``db_<name>`` for obj, if present.

    Only the cache entry is removed; the database field itself is left
    untouched. A missing entry is silently ignored.
    """
    hid = hashid(obj)
    if hid:
        _FIELD_CACHE[hid].pop(name, None)
|
||||
|
||||
def flush_field_cache(obj=None):
    """Reset the field cache.

    Args:
        obj (optional): if given, only this object's cached fields are
            flushed; with no argument the whole field cache is dropped.
    """
    global _FIELD_CACHE
    hid = hashid(obj)
    if hid:
        # pop rather than del: flushing an object that was never cached
        # must not raise KeyError (defaultdict only auto-creates on
        # lookup, not on deletion). Also avoids a second hashid() call.
        _FIELD_CACHE.pop(hid, None)
    else:
        # clean cache completely
        _FIELD_CACHE = defaultdict(dict)
|
||||
|
||||
# on-object property cache (unrelated to database)
|
||||
# Note that the get/set_prop_cache handler do not actually
|
||||
# get/set the property "on" the object but only reads the
|
||||
# value to/from the cache. This is intended to be used
|
||||
# with a get/setter property on the object.
|
||||
|
||||
def get_prop_cache(obj, name, default=None):
    """Read a cached non-database property value from obj.

    Returns the cached value, or `default` on a cache miss or when the
    object has no valid hashid. Unlike get_field_cache this never falls
    back to reading the object itself - it is meant to back a getter
    property defined on the object.
    """
    global _PROP_CACHE
    hid = hashid(obj)
    if not hid:
        return default
    try:
        # original code re-stored the value it had just read from this
        # same slot - a no-op; removed.
        return _PROP_CACHE[hid][name]
    except KeyError:
        return default
|
||||
|
||||
def set_prop_cache(obj, name, val):
    """Store a non-database property value in the cache.

    Contrary to the old docstring, nothing is written to the database
    here - only the in-memory cache is updated (intended to back a
    setter property on the object). A registered OOB property hook, if
    any, is called with the new value.
    """
    hid = hashid(obj)
    if not hid:
        return
    global _PROP_CACHE
    _PROP_CACHE[hid][name] = val
    # oob hook functionality (OOB not yet functional)
    oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name)
    if oob_hook:
        # hook is stored as (callable, args, kwargs)
        oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2])
|
||||
|
||||
|
||||
def del_prop_cache(obj, name):
    """Remove a cached property value; a missing entry is ignored."""
    _PROP_CACHE[hashid(obj)].pop(name, None)
|
||||
def flush_prop_cache(obj=None):
    """Reset the property cache.

    Args:
        obj (optional): if given, only this object's cached properties
            are flushed; with no argument the whole cache is dropped.
    """
    global _PROP_CACHE
    hid = hashid(obj)
    if hid:
        # pop rather than del: flushing an object that was never cached
        # must not raise KeyError.
        _PROP_CACHE.pop(hid, None)
    else:
        # clean cache completely
        _PROP_CACHE = defaultdict(dict)
|
||||
|
||||
# attribute cache
|
||||
|
||||
def get_attr_cache(obj, attrname):
    """Return the cached Attribute object for obj, or None if not cached."""
    return _ATTR_CACHE[hashid(obj)].get(attrname)
|
||||
|
||||
def set_attr_cache(obj, attrname, attrobj):
    """Cache an Attribute object on obj and fire any OOB attribute hook."""
    hid = hashid(obj)
    if not hid:
        return
    global _ATTR_CACHE
    _ATTR_CACHE[hid][attrname] = attrobj
    # oob hook functionality (OOB not yet functional)
    # NOTE(review): _OOB_ATTR_UPDATE_HOOKS is not defined anywhere in this
    # module's visible code (only FIELD/PROP/CUSTOM hook stores exist) -
    # confirm it exists elsewhere, else this raises NameError when reached.
    oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname)
    if oob_hook:
        oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2])
|
||||
|
||||
def del_attr_cache(obj, attrname):
    """Remove an Attribute object from the cache; missing entries are ignored."""
    global _ATTR_CACHE
    _ATTR_CACHE[hashid(obj)].pop(attrname, None)
|
||||
|
||||
def flush_attr_cache(obj=None):
    """Flush the attribute cache.

    Args:
        obj (optional): flush only this object's cached attributes;
            with no argument the whole attribute cache is reset.
    """
    global _ATTR_CACHE
    # NOTE(review): siblings key this check on hashid(obj) rather than obj;
    # kept the original `if obj:` test to preserve behavior for objects
    # with a falsy hashid.
    if obj:
        # pop rather than del: flushing an object that was never cached
        # must not raise KeyError.
        _ATTR_CACHE.pop(hashid(obj), None)
    else:
        # clean cache completely
        _ATTR_CACHE = defaultdict(dict)
|
||||
|
||||
|
||||
def call_ndb_hooks(obj, attrname, value):
|
||||
"""
|
||||
No caching is done of ndb here, but
|
||||
|
|
@ -273,3 +119,208 @@ def call_custom_hooks(obj, attrname, value):
|
|||
oob_hook = _OOB_CUSTOM_UPDATE_HOOKS[hid].get(attrname)
|
||||
if oob_hook:
|
||||
oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2])
|
||||
|
||||
|
||||
if _ENABLE_LOCAL_CACHES:
|
||||
|
||||
# Cache stores
|
||||
_ATTR_CACHE = defaultdict(dict)
|
||||
_FIELD_CACHE = defaultdict(dict)
|
||||
_PROP_CACHE = defaultdict(dict)
|
||||
|
||||
|
||||
def get_cache_sizes():
|
||||
"""
|
||||
Get cache sizes, expressed in number of objects and memory size in MB
|
||||
"""
|
||||
global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE
|
||||
|
||||
attr_n = sum(len(dic) for dic in _ATTR_CACHE.values())
|
||||
attr_mb = sum(sum(getsizeof(obj) for obj in dic.values()) for dic in _ATTR_CACHE.values()) / 1024.0
|
||||
|
||||
field_n = sum(len(dic) for dic in _FIELD_CACHE.values())
|
||||
field_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0
|
||||
|
||||
prop_n = sum(len(dic) for dic in _PROP_CACHE.values())
|
||||
prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0
|
||||
|
||||
return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb)
|
||||
|
||||
# on-object database field cache
|
||||
def get_field_cache(obj, name):
|
||||
"On-model Cache handler."
|
||||
global _FIELD_CACHE
|
||||
hid = hashid(obj)
|
||||
if hid:
|
||||
try:
|
||||
return _FIELD_CACHE[hid][name]
|
||||
except KeyError:
|
||||
val = _GA(obj, "db_%s" % name)
|
||||
_FIELD_CACHE[hid][name] = val
|
||||
return val
|
||||
return _GA(obj, "db_%s" % name)
|
||||
|
||||
def set_field_cache(obj, name, val):
|
||||
"On-model Cache setter. Also updates database."
|
||||
_SA(obj, "db_%s" % name, val)
|
||||
_GA(obj, "save")()
|
||||
hid = hashid(obj)
|
||||
if hid:
|
||||
global _FIELD_CACHE
|
||||
_FIELD_CACHE[hid][name] = val
|
||||
# oob hook functionality
|
||||
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
|
||||
_OOB_HANDLER.update(hid, name, val)
|
||||
|
||||
def del_field_cache(obj, name):
    """Clear field ``db_<name>``: null it in the database and drop the cache.

    The model field is set to None and saved; any cached copy is removed
    and registered OOB field hooks are notified of the cleared value.
    """
    hid = hashid(obj)
    _SA(obj, "db_%s" % name, None)
    _GA(obj, "save")()
    if not hid:
        return
    _FIELD_CACHE[hid].pop(name, None)
    if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
        _OOB_HANDLER.update(hid, name, None)
|
||||
|
||||
def flush_field_cache(obj=None):
|
||||
"On-model cache resetter"
|
||||
hid = hashid(obj)
|
||||
global _FIELD_CACHE
|
||||
if hid:
|
||||
del _FIELD_CACHE[hashid(obj)]
|
||||
else:
|
||||
# clean cache completely
|
||||
_FIELD_CACHE = defaultdict(dict)
|
||||
|
||||
# on-object property cache (unrelated to database)
|
||||
# Note that the get/set_prop_cache handler do not actually
|
||||
# get/set the property "on" the object but only reads the
|
||||
# value to/from the cache. This is intended to be used
|
||||
# with a get/setter property on the object.
|
||||
|
||||
def get_prop_cache(obj, name, default=None):
|
||||
"On-model Cache handler."
|
||||
global _PROP_CACHE
|
||||
hid = hashid(obj)
|
||||
if hid:
|
||||
try:
|
||||
val = _PROP_CACHE[hid][name]
|
||||
except KeyError:
|
||||
return default
|
||||
_PROP_CACHE[hid][name] = val
|
||||
return val
|
||||
return default
|
||||
|
||||
def set_prop_cache(obj, name, val):
|
||||
"On-model Cache setter. Also updates database."
|
||||
hid = hashid(obj)
|
||||
if hid:
|
||||
global _PROP_CACHE
|
||||
_PROP_CACHE[hid][name] = val
|
||||
# oob hook functionality
|
||||
oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name)
|
||||
if oob_hook:
|
||||
oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2])
|
||||
|
||||
|
||||
def del_prop_cache(obj, name):
|
||||
"On-model cache deleter"
|
||||
try:
|
||||
del _PROP_CACHE[hashid(obj)][name]
|
||||
except KeyError:
|
||||
pass
|
||||
def flush_prop_cache(obj=None):
|
||||
"On-model cache resetter"
|
||||
hid = hashid(obj)
|
||||
global _PROP_CACHE
|
||||
if hid:
|
||||
del _PROP_CACHE[hashid(obj)]
|
||||
else:
|
||||
# clean cache completely
|
||||
_PROP_CACHE = defaultdict(dict)
|
||||
|
||||
# attribute cache
|
||||
|
||||
def get_attr_cache(obj, attrname):
|
||||
"""
|
||||
Attribute cache store
|
||||
"""
|
||||
return _ATTR_CACHE[hashid(obj)].get(attrname, None)
|
||||
|
||||
def set_attr_cache(obj, attrname, attrobj):
|
||||
"""
|
||||
Cache an attribute object
|
||||
"""
|
||||
hid = hashid(obj)
|
||||
if hid:
|
||||
global _ATTR_CACHE
|
||||
_ATTR_CACHE[hid][attrname] = attrobj
|
||||
# oob hook functionality
|
||||
oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname)
|
||||
if oob_hook:
|
||||
oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2])
|
||||
|
||||
def del_attr_cache(obj, attrname):
|
||||
"""
|
||||
Remove attribute from cache
|
||||
"""
|
||||
global _ATTR_CACHE
|
||||
try:
|
||||
del _ATTR_CACHE[hashid(obj)][attrname]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def flush_attr_cache(obj=None):
|
||||
"""
|
||||
Flush the attribute cache for this object.
|
||||
"""
|
||||
global _ATTR_CACHE
|
||||
if obj:
|
||||
del _ATTR_CACHE[hashid(obj)]
|
||||
else:
|
||||
# clean cache completely
|
||||
_ATTR_CACHE = defaultdict(dict)
|
||||
|
||||
|
||||
else:
|
||||
# local caches disabled. Use simple pass-through replacements
|
||||
|
||||
def get_cache_sizes():
|
||||
return (0, 0), (0, 0), (0, 0)
|
||||
def get_field_cache(obj, name):
|
||||
return _GA(obj, "db_%s" % name)
|
||||
def set_field_cache(obj, name, val):
|
||||
_SA(obj, "db_%s" % name, val)
|
||||
_GA(obj, "save")()
|
||||
hid = hashid(obj)
|
||||
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
|
||||
_OOB_HANDLER.update(hid, name, val)
|
||||
def del_field_cache(obj, name):
|
||||
_SA(obj, "db_%s" % name, None)
|
||||
_GA(obj, "save")()
|
||||
hid = hashid(obj)
|
||||
if _OOB_FIELD_UPDATE_HOOKS[hid].get(name):
|
||||
_OOB_HANDLER.update(hid, name, None)
|
||||
def flush_field_cache(obj=None):
|
||||
pass
|
||||
# these should get oob handlers when oob is implemented.
|
||||
def get_prop_cache(obj, name, default=None):
|
||||
return None
|
||||
def set_prop_cache(obj, name, val):
|
||||
pass
|
||||
def del_prop_cache(obj, name):
|
||||
pass
|
||||
def flush_prop_cache(obj=None):
|
||||
pass
|
||||
def get_attr_cache(obj, attrname):
|
||||
return None
|
||||
def set_attr_cache(obj, attrname, attrobj):
|
||||
pass
|
||||
def del_attr_cache(obj, attrname):
|
||||
pass
|
||||
def flush_attr_cache(obj=None):
|
||||
pass
|
||||
|
||||
|
|
|
|||
|
|
@ -325,6 +325,9 @@ EVENNIA = Evennia(application)
|
|||
print '-' * 50
|
||||
print ' %(servername)s Server (%(version)s) started.' % {'servername': SERVERNAME, 'version': VERSION}
|
||||
|
||||
if not settings.GAME_CACHE_TYPE:
|
||||
print " caching disabled"
|
||||
|
||||
if AMP_ENABLED:
|
||||
|
||||
# The AMP protocol handles the communication between
|
||||
|
|
|
|||
|
|
@ -111,6 +111,12 @@ ENCODINGS = ["utf-8", "latin-1", "ISO-8859-1"]
|
|||
AMP_HOST = 'localhost'
|
||||
AMP_PORT = 5000
|
||||
AMP_INTERFACE = '127.0.0.1'
|
||||
# Caching speeds up all forms of database access, often considerably. There
|
||||
# are (currently) only two settings, "local" or None, the latter of which turns
|
||||
# off all caching completely. Local caching stores data in the process. It's very
|
||||
# fast but will go out of sync if more than one process writes to the database (such
|
||||
# as when using procpool or an extensive web presence).
|
||||
GAME_CACHE_TYPE = "local"
|
||||
# Attributes on objects are cached aggressively for speed. If the number of
|
||||
# objects is large (and their attributes are often accessed) this can use up a lot of
|
||||
# memory. So every now and then Evennia checks the size of this cache and resets
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue