From 76fa0059ea83d0ed5bc3a5871d62b16873d186a9 Mon Sep 17 00:00:00 2001 From: Griatch Date: Wed, 22 May 2013 18:18:50 +0200 Subject: [PATCH 01/12] First preparations for moving webserver to Server process. --- src/utils/dbserialize.py | 26 ++++++++++++---------- src/utils/idmapper/base.py | 45 ++++++++++++++++++++++++++------------ src/utils/utils.py | 4 ++-- 3 files changed, 48 insertions(+), 27 deletions(-) diff --git a/src/utils/dbserialize.py b/src/utils/dbserialize.py index b1d7682bf1..0c24908480 100644 --- a/src/utils/dbserialize.py +++ b/src/utils/dbserialize.py @@ -31,10 +31,6 @@ from django.contrib.contenttypes.models import ContentType from src.utils.utils import to_str, uses_database from src.utils import logger - - - - __all__ = ("to_pickle", "from_pickle", "do_pickle", "do_unpickle") PICKLE_PROTOCOL = 2 @@ -47,13 +43,21 @@ _FROM_MODEL_MAP = None _TO_MODEL_MAP = None _TO_TYPECLASS = lambda o: hasattr(o, 'typeclass') and o.typeclass or o _IS_PACKED_DBOBJ = lambda o: type(o) == tuple and len(o) == 4 and o[0] == '__packed_dbobj__' -_TO_DATESTRING = lambda o: _GA(o, "db_date_created").strftime("%Y:%m:%d-%H:%M:%S:%f") -if uses_database("mysql"): - from src.server.models import ServerConfig - mysql_version = ServerConfig.objects.get_mysql_db_version() - if mysql_version < '5.6.4': - # mysql <5.6.4 don't support millisecond precision - _TO_DATESTRING = lambda o: _GA(o, "db_date_created").strftime("%Y:%m:%d-%H:%M:%S:000000") +if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4': + # mysql <5.6.4 don't support millisecond precision + _DATESTRING = "%Y:%m:%d-%H:%M:%S:000000" +else: + _DATESTRING = "%Y:%m:%d-%H:%M:%S:%f" + +def _TO_DATESTRING(obj): + "this will only be called with valid database objects. Returns datestring on correct form." 
+ try: + return _GA(obj, "db_date_created").strftime(_DATESTRING) + except AttributeError: + # this can happen if object is not yet saved - no datestring is then set + obj.save() + return _GA(obj, "db_date_created").strftime(_DATESTRING) + def _init_globals(): "Lazy importing to avoid circular import issues" diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index d56948c57f..c2a7a01280 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -7,10 +7,12 @@ leave caching unexpectedly (no use if WeakRefs). Also adds cache_size() for monitoring the size of the cache. """ -import os +import os, threading +from twisted.internet import reactor +from twisted.internet.reactor import callFromThread +from twisted.internet.threads import blockingCallFromThread from django.db.models.base import Model, ModelBase -from django.db.models.signals import post_save, pre_delete, \ - post_syncdb +from django.db.models.signals import post_save, pre_delete, post_syncdb from manager import SharedMemoryManager @@ -37,11 +39,19 @@ def _get_pids(): if server_pid and portal_pid: return int(server_pid), int(portal_pid) return None, None -_SELF_PID = os.getpid() -_SERVER_PID = None -_PORTAL_PID = None -_IS_SUBPROCESS = False +# get info about the current process and thread + +_SELF_PID = os.getpid() +_SERVER_PID, _PORTAL_PID = _get_pids() +_IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and not _SELF_PID in (_SERVER_PID, _PORTAL_PID) +_IS_MAIN_THREAD = threading.currentThread().getName() == "MainThread" + +#_SERVER_PID = None +#_PORTAL_PID = None +# #global _SERVER_PID, _PORTAL_PID, _IS_SUBPROCESS, _SELF_PID +# if not _SERVER_PID and not _PORTAL_PID: +# _IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and not _SELF_PID in (_SERVER_PID, _PORTAL_PID) class SharedMemoryModelBase(ModelBase): # CL: upstream had a __new__ method that skipped ModelBase's __new__ if @@ -158,15 +168,22 @@ class SharedMemoryModel(Model): flush_instance_cache = 
classmethod(flush_instance_cache) def save(cls, *args, **kwargs): - "overload spot for saving" - global _SERVER_PID, _PORTAL_PID, _IS_SUBPROCESS, _SELF_PID - if not _SERVER_PID and not _PORTAL_PID: - _SERVER_PID, _PORTAL_PID = _get_pids() - _IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and (_SERVER_PID != _SELF_PID) and (_PORTAL_PID != _SELF_PID) + "save tracking process/thread issues" + if _IS_SUBPROCESS: - #print "storing in PROC_MODIFIED_OBJS:", cls.db_key, cls.id + # we keep a store of objects modified in subprocesses so + # we know to update their caches in the central process PROC_MODIFIED_OBJS.append(cls) - super(SharedMemoryModel, cls).save(*args, **kwargs) + + if _IS_MAIN_THREAD: + # in main thread - normal operation + super(SharedMemoryModel, cls).save(*args, **kwargs) + else: + # in another thread; make sure to save in reactor thread + def _save_callback(cls, *args, **kwargs): + super(SharedMemoryModel, cls).save(*args, **kwargs) + blockingCallFromThread(reactor, _save_callback, cls, *args, **kwargs) + #callFromThread(_save_callback, cls, *args, **kwargs) # Use a signal so we make sure to catch cascades. def flush_cache(**kwargs): diff --git a/src/utils/utils.py b/src/utils/utils.py index a10d1b473a..503516e5f8 100644 --- a/src/utils/utils.py +++ b/src/utils/utils.py @@ -467,9 +467,9 @@ def delay(delay=2, retval=None, callback=None): """ Delay the return of a value. 
Inputs: - to_return (any) - this will be returned by this function after a delay delay (int) - the delay in seconds - callback (func(r)) - if given, this will be called with the to_return after delay seconds + retval (any) - this will be returned by this function after a delay + callback (func(retval)) - if given, this will be called with retval after delay seconds Returns: deferred that will fire with to_return after delay seconds """ From 812bdb0f73537a8e057f872595ac472150d0df2c Mon Sep 17 00:00:00 2001 From: Griatch Date: Wed, 22 May 2013 18:40:16 +0200 Subject: [PATCH 02/12] Refactored src/server, splitting out into a portal subdirectory to make it clearer what goes on which "side". --- game/runner.py | 2 +- src/server/portal/__init__.py | 0 src/server/{ => portal}/mccp.py | 2 +- src/server/{ => portal}/msdp.py | 0 src/server/{ => portal}/mssp.py | 0 src/server/{ => portal}/portal.py | 14 +- src/server/portal/portalsessionhandler.py | 167 ++++++++++++++++++++++ src/server/{ => portal}/ssh.py | 0 src/server/{ => portal}/ssl.py | 2 +- src/server/{ => portal}/telnet.py | 4 +- src/server/{ => portal}/ttype.py | 0 src/server/{ => portal}/webclient.py | 0 src/server/sessionhandler.py | 163 --------------------- 13 files changed, 179 insertions(+), 175 deletions(-) create mode 100644 src/server/portal/__init__.py rename src/server/{ => portal}/mccp.py (97%) rename src/server/{ => portal}/msdp.py (100%) rename src/server/{ => portal}/mssp.py (100%) rename src/server/{ => portal}/portal.py (96%) create mode 100644 src/server/portal/portalsessionhandler.py rename src/server/{ => portal}/ssh.py (100%) rename src/server/{ => portal}/ssl.py (98%) rename src/server/{ => portal}/telnet.py (98%) rename src/server/{ => portal}/ttype.py (100%) rename src/server/{ => portal}/webclient.py (100%) diff --git a/game/runner.py b/game/runner.py index f7408c74ce..3bc644065e 100644 --- a/game/runner.py +++ b/game/runner.py @@ -50,7 +50,7 @@ from django.conf import settings # Setup access 
of the evennia server itself SERVER_PY_FILE = os.path.join(settings.SRC_DIR, 'server/server.py') -PORTAL_PY_FILE = os.path.join(settings.SRC_DIR, 'server/portal.py') +PORTAL_PY_FILE = os.path.join(settings.SRC_DIR, 'server/portal/portal.py') # Get logfile names SERVER_LOGFILE = settings.SERVER_LOG_FILE diff --git a/src/server/portal/__init__.py b/src/server/portal/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/server/mccp.py b/src/server/portal/mccp.py similarity index 97% rename from src/server/mccp.py rename to src/server/portal/mccp.py index 9cf7b34d5d..2e6aa77cd3 100644 --- a/src/server/mccp.py +++ b/src/server/portal/mccp.py @@ -2,7 +2,7 @@ MCCP - Mud Client Compression Protocol -The implements the MCCP v2 telnet protocol as per +This implements the MCCP v2 telnet protocol as per http://tintin.sourceforge.net/mccp/. MCCP allows for the server to compress data when sending to supporting clients, reducing bandwidth by 70-90%.. The compression is done using Python's builtin zlib diff --git a/src/server/msdp.py b/src/server/portal/msdp.py similarity index 100% rename from src/server/msdp.py rename to src/server/portal/msdp.py diff --git a/src/server/mssp.py b/src/server/portal/mssp.py similarity index 100% rename from src/server/mssp.py rename to src/server/portal/mssp.py diff --git a/src/server/portal.py b/src/server/portal/portal.py similarity index 96% rename from src/server/portal.py rename to src/server/portal/portal.py index b871bd434e..ca6bbc4d77 100644 --- a/src/server/portal.py +++ b/src/server/portal/portal.py @@ -11,15 +11,15 @@ import sys import os if os.name == 'nt': # For Windows batchfile we need an extra path insertion here. 
- sys.path.insert(0, os.path.dirname(os.path.dirname( - os.path.dirname(os.path.abspath(__file__))))) + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname( + os.path.dirname(os.path.abspath(__file__)))))) from twisted.application import internet, service from twisted.internet import protocol, reactor from twisted.web import server, static from django.conf import settings from src.utils.utils import get_evennia_version, mod_import, make_iter -from src.server.sessionhandler import PORTAL_SESSIONS +from src.server.portal.portalsessionhandler import PORTAL_SESSIONS PORTAL_SERVICES_PLUGIN_MODULES = [mod_import(module) for module in make_iter(settings.PORTAL_SERVICES_PLUGIN_MODULES)] @@ -168,7 +168,7 @@ if TELNET_ENABLED: # Start telnet game connections - from src.server import telnet + from src.server.portal import telnet for interface in TELNET_INTERFACES: if ":" in interface: @@ -192,7 +192,7 @@ if SSL_ENABLED: # Start SSL game connection (requires PyOpenSSL). - from src.server import ssl + from src.server.portal import ssl for interface in SSL_INTERFACES: if ":" in interface: @@ -218,7 +218,7 @@ if SSH_ENABLED: # Start SSH game connections. Will create a keypair in evennia/game if necessary. 
- from src.server import ssh + from src.server.portal import ssh for interface in SSH_INTERFACES: if ":" in interface: @@ -255,7 +255,7 @@ if WEBSERVER_ENABLED: webclientstr = "" if WEBCLIENT_ENABLED: # create ajax client processes at /webclientdata - from src.server.webclient import WebClient + from src.server.portal.webclient import WebClient webclient = WebClient() webclient.sessionhandler = PORTAL_SESSIONS web_root.putChild("webclientdata", webclient) diff --git a/src/server/portal/portalsessionhandler.py b/src/server/portal/portalsessionhandler.py new file mode 100644 index 0000000000..04228933f8 --- /dev/null +++ b/src/server/portal/portalsessionhandler.py @@ -0,0 +1,167 @@ +""" +Sessionhandler for portal sessions +""" +import time +from src.server.sessionhandler import SessionHandler, PCONN, PDISCONN + +#------------------------------------------------------------ +# Portal-SessionHandler class +#------------------------------------------------------------ +class PortalSessionHandler(SessionHandler): + """ + This object holds the sessions connected to the portal at any time. + It is synced with the server's equivalent SessionHandler over the AMP + connection. + + Sessions register with the handler using the connect() method. This + will assign a new unique sessionid to the session and send that sessid + to the server using the AMP connection. + + """ + + def __init__(self): + """ + Init the handler + """ + self.portal = None + self.sessions = {} + self.latest_sessid = 0 + self.uptime = time.time() + self.connection_time = 0 + + def at_server_connection(self): + """ + Called when the Portal establishes connection with the + Server. At this point, the AMP connection is already + established. + """ + self.connection_time = time.time() + + def connect(self, session): + """ + Called by protocol at first connect. This adds a not-yet authenticated session + using an ever-increasing counter for sessid. 
+ """ + self.latest_sessid += 1 + sessid = self.latest_sessid + session.sessid = sessid + sessdata = session.get_sync_data() + self.sessions[sessid] = session + # sync with server-side + self.portal.amp_protocol.call_remote_ServerAdmin(sessid, + operation=PCONN, + data=sessdata) + def disconnect(self, session): + """ + Called from portal side when the connection is closed from the portal side. + """ + sessid = session.sessid + if sessid in self.sessions: + del self.sessions[sessid] + del session + # tell server to also delete this session + self.portal.amp_protocol.call_remote_ServerAdmin(sessid, + operation=PDISCONN) + + def server_disconnect(self, sessid, reason=""): + """ + Called by server to force a disconnect by sessid + """ + session = self.sessions.get(sessid, None) + if session: + session.disconnect(reason) + if sessid in self.sessions: + # in case sess.disconnect doesn't delete it + del self.sessions[sessid] + del session + + def server_disconnect_all(self, reason=""): + """ + Called by server when forcing a clean disconnect for everyone. + """ + for session in self.sessions.values(): + session.disconnect(reason) + del session + self.sessions = {} + + def server_logged_in(self, sessid, data): + "The server tells us that the session has been authenticated. Updated it." + sess = self.get_session(sessid) + sess.load_sync_data(data) + + def server_session_sync(self, serversessions): + """ + Server wants to save data to the portal, maybe because it's about to shut down. 
+ We don't overwrite any sessions here, just update them in-place and remove + any that are out of sync (which should normally not be the case) + + serversessions - dictionary {sessid:{property:value},...} describing the properties + to sync on all sessions + """ + to_save = [sessid for sessid in serversessions if sessid in self.sessions] + to_delete = [sessid for sessid in self.sessions if sessid not in to_save] + # save protocols + for sessid in to_save: + self.sessions[sessid].load_sync_data(serversessions[sessid]) + # disconnect out-of-sync missing protocols + for sessid in to_delete: + self.server_disconnect(sessid) + + def count_loggedin(self, include_unloggedin=False): + """ + Count loggedin connections, alternatively count all connections. + """ + return len(self.get_sessions(include_unloggedin=include_unloggedin)) + + def session_from_suid(self, suid): + """ + Given a session id, retrieve the session (this is primarily + intended to be called by web clients) + """ + return [sess for sess in self.get_sessions(include_unloggedin=True) + if hasattr(sess, 'suid') and sess.suid == suid] + + def data_in(self, session, string="", data=""): + """ + Called by portal sessions for relaying data coming + in from the protocol to the server. data is + serialized before passed on. + """ + #print "portal_data_in:", string + self.portal.amp_protocol.call_remote_MsgPortal2Server(session.sessid, + msg=string, + data=data) + def announce_all(self, message): + """ + Send message to all connection sessions + """ + for session in self.sessions.values(): + session.data_out(message) + + def data_out(self, sessid, string="", data=""): + """ + Called by server for having the portal relay messages and data + to the correct session protocol. 
+ """ + session = self.sessions.get(sessid, None) + if session: + session.data_out(string, data=data) + + def oob_data_in(self, session, data): + """ + OOB (Out-of-band) data Portal -> Server + """ + print "portal_oob_data_in:", data + self.portal.amp_protocol.call_remote_OOBPortal2Server(session.sessid, + data=data) + + def oob_data_out(self, sessid, data): + """ + OOB (Out-of-band) data Server -> Portal + """ + print "portal_oob_data_out:", data + session = self.sessions.get(sessid, None) + if session: + session.oob_data_out(data) + +PORTAL_SESSIONS = PortalSessionHandler() diff --git a/src/server/ssh.py b/src/server/portal/ssh.py similarity index 100% rename from src/server/ssh.py rename to src/server/portal/ssh.py diff --git a/src/server/ssl.py b/src/server/portal/ssl.py similarity index 98% rename from src/server/ssl.py rename to src/server/portal/ssl.py index b3064731e3..238d3bfd8b 100644 --- a/src/server/ssl.py +++ b/src/server/portal/ssl.py @@ -11,7 +11,7 @@ except ImportError: print " SSL_ENABLED requires PyOpenSSL." sys.exit(5) -from src.server.telnet import TelnetProtocol +from src.server.portal.telnet import TelnetProtocol class SSLProtocol(TelnetProtocol): """ diff --git a/src/server/telnet.py b/src/server/portal/telnet.py similarity index 98% rename from src/server/telnet.py rename to src/server/portal/telnet.py index 27e446e56d..0812d305f7 100644 --- a/src/server/telnet.py +++ b/src/server/portal/telnet.py @@ -10,8 +10,8 @@ sessions etc. 
import re from twisted.conch.telnet import Telnet, StatefulTelnetProtocol, IAC, LINEMODE from src.server.session import Session -from src.server import ttype, mssp -from src.server.mccp import Mccp, mccp_compress, MCCP +from src.server.portal import ttype, mssp +from src.server.portal.mccp import Mccp, mccp_compress, MCCP from src.utils import utils, ansi, logger _RE_N = re.compile(r"\{n$") diff --git a/src/server/ttype.py b/src/server/portal/ttype.py similarity index 100% rename from src/server/ttype.py rename to src/server/portal/ttype.py diff --git a/src/server/webclient.py b/src/server/portal/webclient.py similarity index 100% rename from src/server/webclient.py rename to src/server/portal/webclient.py diff --git a/src/server/sessionhandler.py b/src/server/sessionhandler.py index 34ca786ec8..6554279d31 100644 --- a/src/server/sessionhandler.py +++ b/src/server/sessionhandler.py @@ -380,167 +380,4 @@ class ServerSessionHandler(SessionHandler): """ self.server.amp_protocol.call_remote_OOBServer2Portal(session.sessid, data=data) - -#------------------------------------------------------------ -# Portal-SessionHandler class -#------------------------------------------------------------ - -class PortalSessionHandler(SessionHandler): - """ - This object holds the sessions connected to the portal at any time. - It is synced with the server's equivalent SessionHandler over the AMP - connection. - - Sessions register with the handler using the connect() method. This - will assign a new unique sessionid to the session and send that sessid - to the server using the AMP connection. - - """ - - def __init__(self): - """ - Init the handler - """ - self.portal = None - self.sessions = {} - self.latest_sessid = 0 - self.uptime = time.time() - self.connection_time = 0 - - def at_server_connection(self): - """ - Called when the Portal establishes connection with the - Server. At this point, the AMP connection is already - established. 
- """ - self.connection_time = time.time() - - def connect(self, session): - """ - Called by protocol at first connect. This adds a not-yet authenticated session - using an ever-increasing counter for sessid. - """ - self.latest_sessid += 1 - sessid = self.latest_sessid - session.sessid = sessid - sessdata = session.get_sync_data() - self.sessions[sessid] = session - # sync with server-side - self.portal.amp_protocol.call_remote_ServerAdmin(sessid, - operation=PCONN, - data=sessdata) - def disconnect(self, session): - """ - Called from portal side when the connection is closed from the portal side. - """ - sessid = session.sessid - if sessid in self.sessions: - del self.sessions[sessid] - del session - # tell server to also delete this session - self.portal.amp_protocol.call_remote_ServerAdmin(sessid, - operation=PDISCONN) - - def server_disconnect(self, sessid, reason=""): - """ - Called by server to force a disconnect by sessid - """ - session = self.sessions.get(sessid, None) - if session: - session.disconnect(reason) - if sessid in self.sessions: - # in case sess.disconnect doesn't delete it - del self.sessions[sessid] - del session - - def server_disconnect_all(self, reason=""): - """ - Called by server when forcing a clean disconnect for everyone. - """ - for session in self.sessions.values(): - session.disconnect(reason) - del session - self.sessions = {} - - def server_logged_in(self, sessid, data): - "The server tells us that the session has been authenticated. Updated it." - sess = self.get_session(sessid) - sess.load_sync_data(data) - - def server_session_sync(self, serversessions): - """ - Server wants to save data to the portal, maybe because it's about to shut down. 
- We don't overwrite any sessions here, just update them in-place and remove - any that are out of sync (which should normally not be the case) - - serversessions - dictionary {sessid:{property:value},...} describing the properties - to sync on all sessions - """ - to_save = [sessid for sessid in serversessions if sessid in self.sessions] - to_delete = [sessid for sessid in self.sessions if sessid not in to_save] - # save protocols - for sessid in to_save: - self.sessions[sessid].load_sync_data(serversessions[sessid]) - # disconnect out-of-sync missing protocols - for sessid in to_delete: - self.server_disconnect(sessid) - - def count_loggedin(self, include_unloggedin=False): - """ - Count loggedin connections, alternatively count all connections. - """ - return len(self.get_sessions(include_unloggedin=include_unloggedin)) - - def session_from_suid(self, suid): - """ - Given a session id, retrieve the session (this is primarily - intended to be called by web clients) - """ - return [sess for sess in self.get_sessions(include_unloggedin=True) - if hasattr(sess, 'suid') and sess.suid == suid] - - def data_in(self, session, string="", data=""): - """ - Called by portal sessions for relaying data coming - in from the protocol to the server. data is - serialized before passed on. - """ - #print "portal_data_in:", string - self.portal.amp_protocol.call_remote_MsgPortal2Server(session.sessid, - msg=string, - data=data) - def announce_all(self, message): - """ - Send message to all connection sessions - """ - for session in self.sessions.values(): - session.data_out(message) - - def data_out(self, sessid, string="", data=""): - """ - Called by server for having the portal relay messages and data - to the correct session protocol. 
- """ - session = self.sessions.get(sessid, None) - if session: - session.data_out(string, data=data) - - def oob_data_in(self, session, data): - """ - OOB (Out-of-band) data Portal -> Server - """ - print "portal_oob_data_in:", data - self.portal.amp_protocol.call_remote_OOBPortal2Server(session.sessid, - data=data) - - def oob_data_out(self, sessid, data): - """ - OOB (Out-of-band) data Server -> Portal - """ - print "portal_oob_data_out:", data - session = self.sessions.get(sessid, None) - if session: - session.oob_data_out(data) - SESSIONS = ServerSessionHandler() -PORTAL_SESSIONS = PortalSessionHandler() From 94f50fcf33de43a8f131689b97a9d4216509de3e Mon Sep 17 00:00:00 2001 From: Griatch Date: Thu, 23 May 2013 00:02:25 +0200 Subject: [PATCH 03/12] Moved webserver to Server and set up the webclient to work as a stand-alone program on the Portal side. The website link to the webclient is currently pointing to the wrong process, some way to direct that transparently to the Portal-side is needed. 
--- src/server/portal/portal.py | 47 ++++++++++++++----------------------- src/server/server.py | 38 +++++++++++++++++++++++++++++- src/web/news/urls.py | 4 ++-- src/web/urls.py | 10 ++++---- src/web/webclient/urls.py | 4 ++-- src/web/webclient/views.py | 14 +++++++---- src/web/website/urls.py | 6 ++--- 7 files changed, 77 insertions(+), 46 deletions(-) diff --git a/src/server/portal/portal.py b/src/server/portal/portal.py index ca6bbc4d77..6f71d6832b 100644 --- a/src/server/portal/portal.py +++ b/src/server/portal/portal.py @@ -16,7 +16,7 @@ if os.name == 'nt': from twisted.application import internet, service from twisted.internet import protocol, reactor -from twisted.web import server, static +from twisted.web import server from django.conf import settings from src.utils.utils import get_evennia_version, mod_import, make_iter from src.server.portal.portalsessionhandler import PORTAL_SESSIONS @@ -240,29 +240,9 @@ if SSH_ENABLED: if WEBSERVER_ENABLED: - # Start a django-compatible webserver. 
+ # Start a reverse proxy to relay data to the Server-side webserver - from twisted.python import threadpool - from src.server.webserver import DjangoWebRoot, WSGIWebServer - - # start a thread pool and define the root url (/) as a wsgi resource - # recognized by Django - threads = threadpool.ThreadPool() - web_root = DjangoWebRoot(threads) - # point our media resources to url /media - web_root.putChild("media", static.File(settings.MEDIA_ROOT)) - - webclientstr = "" - if WEBCLIENT_ENABLED: - # create ajax client processes at /webclientdata - from src.server.portal.webclient import WebClient - webclient = WebClient() - webclient.sessionhandler = PORTAL_SESSIONS - web_root.putChild("webclientdata", webclient) - - webclientstr = "/client" - - web_site = server.Site(web_root, logPath=settings.HTTP_LOG_FILE) + from twisted.web import proxy for interface in WEBSERVER_INTERFACES: if ":" in interface: @@ -273,12 +253,22 @@ if WEBSERVER_ENABLED: ifacestr = "-%s" % interface for port in WEBSERVER_PORTS: pstring = "%s:%s" % (ifacestr, port) - # create the webserver - webserver = WSGIWebServer(threads, port, web_site, interface=interface) - webserver.setName('EvenniaWebServer%s' % pstring) - PORTAL.services.addService(webserver) + web_root = proxy.ReverseProxyResource("localhost", port, '') - print " webserver%s%s: %s" % (webclientstr, ifacestr, port) + webclientstr = "" + if WEBCLIENT_ENABLED: + # create ajax client processes at /webclientdata + from src.server.portal.webclient import WebClient + webclient = WebClient() + webclient.sessionhandler = PORTAL_SESSIONS + web_root.putChild("webclientdata", webclient) + webclientstr = "/client" + + web_root = server.Site(web_root, logPath=settings.HTTP_LOG_FILE) + proxy_service = internet.TCPServer(port+1, web_root) + proxy_service.setName('EvenniaWebProxy%s' % pstring) + PORTAL.services.addService(proxy_service) + print " webproxy%s%s: %s" % (webclientstr, ifacestr, port+1) for plugin_module in PORTAL_SERVICES_PLUGIN_MODULES: # 
external plugin services to start @@ -286,7 +276,6 @@ for plugin_module in PORTAL_SERVICES_PLUGIN_MODULES: print '-' * 50 # end of terminal output - if os.name == 'nt': # Windows only: Set PID file manually f = open(os.path.join(settings.GAME_DIR, 'portal.pid'), 'w') diff --git a/src/server/server.py b/src/server/server.py index f25de51fb6..8836f1a0f0 100644 --- a/src/server/server.py +++ b/src/server/server.py @@ -14,7 +14,7 @@ if os.name == 'nt': # For Windows batchfile we need an extra path insertion here. sys.path.insert(0, os.path.dirname(os.path.dirname( os.path.dirname(os.path.abspath(__file__))))) - +from twisted.web import server, static from twisted.application import internet, service from twisted.internet import reactor, defer import django @@ -57,10 +57,15 @@ AMP_HOST = settings.AMP_HOST AMP_PORT = settings.AMP_PORT AMP_INTERFACE = settings.AMP_INTERFACE +WEBSERVER_PORTS = settings.WEBSERVER_PORTS +WEBSERVER_INTERFACES = settings.WEBSERVER_INTERFACES + # server-channel mappings +WEBSERVER_ENABLED = settings.WEBSERVER_ENABLED and WEBSERVER_PORTS and WEBSERVER_INTERFACES IMC2_ENABLED = settings.IMC2_ENABLED IRC_ENABLED = settings.IRC_ENABLED RSS_ENABLED = settings.RSS_ENABLED +WEBCLIENT_ENABLED = settings.WEBCLIENT_ENABLED #------------------------------------------------------------ @@ -334,6 +339,37 @@ if AMP_ENABLED: amp_service.setName("EvenniaPortal") EVENNIA.services.addService(amp_service) +if WEBSERVER_ENABLED: + + # Start a django-compatible webserver. 
+ + from twisted.python import threadpool + from src.server.webserver import DjangoWebRoot, WSGIWebServer + + # start a thread pool and define the root url (/) as a wsgi resource + # recognized by Django + threads = threadpool.ThreadPool() + web_root = DjangoWebRoot(threads) + # point our media resources to url /media + web_root.putChild("media", static.File(settings.MEDIA_ROOT)) + web_site = server.Site(web_root, logPath=settings.HTTP_LOG_FILE) + + for interface in WEBSERVER_INTERFACES: + if ":" in interface: + print " iPv6 interfaces not yet supported" + continue + ifacestr = "" + if interface != '0.0.0.0' or len(WEBSERVER_INTERFACES) > 1: + ifacestr = "-%s" % interface + for port in WEBSERVER_PORTS: + pstring = "%s:%s" % (ifacestr, port) + # create the webserver + webserver = WSGIWebServer(threads, port, web_site, interface=interface) + webserver.setName('EvenniaWebServer%s' % pstring) + EVENNIA.services.addService(webserver) + + print " webserver%s: %s" % (ifacestr, port) + if IRC_ENABLED: # IRC channel connections diff --git a/src/web/news/urls.py b/src/web/news/urls.py index 10c891d42c..09c4a3b6fe 100755 --- a/src/web/news/urls.py +++ b/src/web/news/urls.py @@ -1,9 +1,9 @@ """ -This structures the url tree for the news application. +This structures the url tree for the news application. It is imported from the root handler, game.web.urls.py. 
""" -from django.conf.urls.defaults import * +from django.conf.urls import * urlpatterns = patterns('src.web.news.views', (r'^show/(?P\d+)/$', 'show_news'), diff --git a/src/web/urls.py b/src/web/urls.py index bbe83633d6..5ca3bf8a41 100755 --- a/src/web/urls.py +++ b/src/web/urls.py @@ -6,7 +6,7 @@ # http://diveintopython.org/regular_expressions/street_addresses.html#re.matching.2.3 # -from django.conf.urls.defaults import * +from django.conf.urls import * from django.conf import settings from django.contrib import admin from django.views.generic import RedirectView @@ -17,12 +17,12 @@ from django.db.models.loading import cache as model_cache if not model_cache.loaded: model_cache.get_models() -# loop over all settings.INSTALLED_APPS and execute code in +# loop over all settings.INSTALLED_APPS and execute code in # files named admin.py in each such app (this will add those # models to the admin site) admin.autodiscover() -# Setup the root url tree from / +# Setup the root url tree from / urlpatterns = patterns('', # User Authentication @@ -36,11 +36,11 @@ urlpatterns = patterns('', # Page place-holder for things that aren't implemented yet. url(r'^tbi/', 'src.web.website.views.to_be_implemented'), - + # Admin interface url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), - + # favicon url(r'^favicon\.ico$', RedirectView.as_view(url='/media/images/favicon.ico')), diff --git a/src/web/webclient/urls.py b/src/web/webclient/urls.py index 93702a0673..03f2595d2e 100644 --- a/src/web/webclient/urls.py +++ b/src/web/webclient/urls.py @@ -1,6 +1,6 @@ """ -This structures the (simple) structure of the -webpage 'application'. +This structures the (simple) structure of the +webpage 'application'. 
""" from django.conf.urls import * diff --git a/src/web/webclient/views.py b/src/web/webclient/views.py index e9ac9910a3..3d097b6935 100644 --- a/src/web/webclient/views.py +++ b/src/web/webclient/views.py @@ -1,19 +1,25 @@ """ -This contains a simple view for rendering the webclient +This contains a simple view for rendering the webclient page and serve it eventual static content. """ -from django.shortcuts import render_to_response +from django.shortcuts import render_to_response, redirect from django.template import RequestContext from django.conf import settings from src.server.sessionhandler import SESSIONS def webclient(request): """ - Webclient page template loading. - """ + Webclient page template loading. + """ + + # analyze request to find which port we are on + if int(request.META["SERVER_PORT"]) == 8000: + # we relay webclient to the portal port + print "Called from port 8000!" + #return redirect("http://localhost:8001/webclient/", permanent=True) # as an example we send the number of connected players to the template pagevars = {'num_players_connected': SESSIONS.player_count()} diff --git a/src/web/website/urls.py b/src/web/website/urls.py index b12603afe1..8bb4bd811e 100644 --- a/src/web/website/urls.py +++ b/src/web/website/urls.py @@ -1,9 +1,9 @@ """ -This structures the (simple) structure of the -webpage 'application'. +This structures the (simple) structure of the +webpage 'application'. """ -from django.conf.urls.defaults import * +from django.conf.urls import * urlpatterns = patterns('src.web.website.views', (r'^$', 'page_index'), From c083fe6266eddee8db14f6b1cc0b6cb98a289c55 Mon Sep 17 00:00:00 2001 From: Griatch Date: Thu, 23 May 2013 22:58:50 +0200 Subject: [PATCH 04/12] Webserver->Server is working, some more cleanup needed. 
--- src/server/portal/portal.py | 19 ++++++++++++------- src/server/portal/webclient.py | 10 +++++----- src/server/server.py | 6 +++--- src/settings_default.py | 10 +++++++--- 4 files changed, 27 insertions(+), 18 deletions(-) diff --git a/src/server/portal/portal.py b/src/server/portal/portal.py index 6f71d6832b..a39618f6e3 100644 --- a/src/server/portal/portal.py +++ b/src/server/portal/portal.py @@ -55,7 +55,8 @@ WEBCLIENT_ENABLED = settings.WEBCLIENT_ENABLED AMP_HOST = settings.AMP_HOST AMP_PORT = settings.AMP_PORT -AMP_ENABLED = AMP_HOST and AMP_PORT +AMP_INTERFACE = settings.AMP_INTERFACE +AMP_ENABLED = AMP_HOST and AMP_PORT and AMP_INTERFACE #------------------------------------------------------------ @@ -156,6 +157,11 @@ if AMP_ENABLED: from src.server import amp + ifacestr = "" + if AMP_HOST != '127.0.0.1': + ifacestr = "-%s" % AMP_HOST + print ' amp (to Server)%s: %s' % (ifacestr, AMP_PORT) + factory = amp.AmpClientFactory(PORTAL) amp_client = internet.TCPClient(AMP_HOST, AMP_PORT, factory) amp_client.setName('evennia_amp') @@ -251,10 +257,9 @@ if WEBSERVER_ENABLED: ifacestr = "" if interface != '0.0.0.0' or len(WEBSERVER_INTERFACES) > 1: ifacestr = "-%s" % interface - for port in WEBSERVER_PORTS: - pstring = "%s:%s" % (ifacestr, port) - web_root = proxy.ReverseProxyResource("localhost", port, '') - + for proxyport, serverport in WEBSERVER_PORTS: + pstring = "%s:%s<->%s" % (ifacestr, proxyport, serverport) + web_root = proxy.ReverseProxyResource('127.0.0.1', serverport, '') webclientstr = "" if WEBCLIENT_ENABLED: # create ajax client processes at /webclientdata @@ -265,10 +270,10 @@ if WEBSERVER_ENABLED: webclientstr = "/client" web_root = server.Site(web_root, logPath=settings.HTTP_LOG_FILE) - proxy_service = internet.TCPServer(port+1, web_root) + proxy_service = internet.TCPServer(proxyport, web_root, interface=interface) proxy_service.setName('EvenniaWebProxy%s' % pstring) PORTAL.services.addService(proxy_service) - print " webproxy%s%s: %s" % 
(webclientstr, ifacestr, port+1) + print " webproxy%s%s:%s (<-> %s)" % (webclientstr, ifacestr, proxyport, serverport) for plugin_module in PORTAL_SERVICES_PLUGIN_MODULES: # external plugin services to start diff --git a/src/server/portal/webclient.py b/src/server/portal/webclient.py index afdbd49f2c..746d20d749 100644 --- a/src/server/portal/webclient.py +++ b/src/server/portal/webclient.py @@ -63,11 +63,11 @@ class WebClient(resource.Resource): self.requests = {} self.databuffer = {} - def getChild(self, path, request): - """ - This is the place to put dynamic content. - """ - return self + #def getChild(self, path, request): + # """ + # This is the place to put dynamic content. + # """ + # return self def _responseFailed(self, failure, suid, request): "callback if a request is lost/timed out" diff --git a/src/server/server.py b/src/server/server.py index 8836f1a0f0..1e33daf04a 100644 --- a/src/server/server.py +++ b/src/server/server.py @@ -330,7 +330,7 @@ if AMP_ENABLED: ifacestr = "" if AMP_INTERFACE != '127.0.0.1': ifacestr = "-%s" % AMP_INTERFACE - print ' amp (to Portal)%s:%s' % (ifacestr, AMP_PORT) + print ' amp (to Portal)%s: %s' % (ifacestr, AMP_PORT) from src.server import amp @@ -361,9 +361,9 @@ if WEBSERVER_ENABLED: ifacestr = "" if interface != '0.0.0.0' or len(WEBSERVER_INTERFACES) > 1: ifacestr = "-%s" % interface - for port in WEBSERVER_PORTS: + for proxyport, port in WEBSERVER_PORTS: + # create the webserver (we only need the port for this) pstring = "%s:%s" % (ifacestr, port) - # create the webserver webserver = WSGIWebServer(threads, port, web_site, interface=interface) webserver.setName('EvenniaWebServer%s' % pstring) EVENNIA.services.addService(webserver) diff --git a/src/settings_default.py b/src/settings_default.py index 626d559804..2e4bbed358 100644 --- a/src/settings_default.py +++ b/src/settings_default.py @@ -44,8 +44,12 @@ WEBSERVER_ENABLED = True # attacks. It defaults to allowing all. 
In production, make # sure to change this to your actual host addresses/IPs. ALLOWED_HOSTS = ["*"] -# A list of ports the Evennia webserver listens on -WEBSERVER_PORTS = [8000] +# The webserver sits behind a Portal proxy. This is a list +# of tuples (proxyport,serverport) used. The proxyports are what +# the Portal proxy presents to the world. The serverports are +# the internal ports the proxy uses to forward data to the Server-side +# webserver (these should not be publicly open) +WEBSERVER_PORTS = [(8000, 5001)] # Interface addresses to listen to. If 0.0.0.0, listen to all. WEBSERVER_INTERFACES = ['0.0.0.0'] # IP addresses that may talk to the server in a reverse proxy configuration, @@ -111,7 +115,7 @@ ENCODINGS = ["utf-8", "latin-1", "ISO-8859-1"] # communicate with it. This is an internal functionality of Evennia, usually # operating between two processes on the same machine. You usually don't need to # change this unless you cannot use the default AMP port/host for whatever reason. -AMP_HOST = 'localhost' +AMP_HOST = '127.0.0.1' AMP_PORT = 5000 AMP_INTERFACE = '127.0.0.1' # Caching speeds up all forms of database access, often considerably. There From 20e093aa26a15df8cfa8f381164a9550f45c9ace Mon Sep 17 00:00:00 2001 From: Griatch Date: Thu, 23 May 2013 23:42:55 +0200 Subject: [PATCH 05/12] Settings and server configs updated. The caches between server and website threads are still not properly addressed - a change in the admin interface does not immediately translate to a change in-game. --- game/evennia.py | 1 + src/server/portal/portal.py | 5 +---- src/settings_default.py | 2 +- src/utils/idmapper/base.py | 5 +++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/game/evennia.py b/game/evennia.py index a42835b352..958786d32f 100755 --- a/game/evennia.py +++ b/game/evennia.py @@ -403,6 +403,7 @@ def error_check_python_modules(): deprstring = "settings.%s should be renamed to %s. 
If defaults are used, their path/classname must be updated (see src/settings_default.py)." if hasattr(settings, "CMDSET_DEFAULT"): raise DeprecationWarning(deprstring % ("CMDSET_DEFAULT", "CMDSET_CHARACTER")) if hasattr(settings, "CMDSET_OOC"): raise DeprecationWarning(deprstring % ("CMDSET_OOC", "CMDSET_PLAYER")) + if settings.WEBSERVER_ENABLED and not isinstance(settings.WEBSERVER_PORTS[0], tuple): raise DeprecationWarning("settings.WEBSERVER_PORTS must be on the form [(proxyport, serverport), ...]") from src.commands import cmdsethandler if not cmdsethandler.import_cmdset(settings.CMDSET_UNLOGGEDIN, None): print "Warning: CMDSET_UNLOGGED failed to load!" diff --git a/src/server/portal/portal.py b/src/server/portal/portal.py index a39618f6e3..be5b7c1f4b 100644 --- a/src/server/portal/portal.py +++ b/src/server/portal/portal.py @@ -157,10 +157,7 @@ if AMP_ENABLED: from src.server import amp - ifacestr = "" - if AMP_HOST != '127.0.0.1': - ifacestr = "-%s" % AMP_HOST - print ' amp (to Server)%s: %s' % (ifacestr, AMP_PORT) + print ' amp (to Server): %s' % AMP_PORT factory = amp.AmpClientFactory(PORTAL) amp_client = internet.TCPClient(AMP_HOST, AMP_PORT, factory) diff --git a/src/settings_default.py b/src/settings_default.py index 2e4bbed358..13cc9e620e 100644 --- a/src/settings_default.py +++ b/src/settings_default.py @@ -115,7 +115,7 @@ ENCODINGS = ["utf-8", "latin-1", "ISO-8859-1"] # communicate with it. This is an internal functionality of Evennia, usually # operating between two processes on the same machine. You usually don't need to # change this unless you cannot use the default AMP port/host for whatever reason. -AMP_HOST = '127.0.0.1' +AMP_HOST = 'localhost' AMP_PORT = 5000 AMP_INTERFACE = '127.0.0.1' # Caching speeds up all forms of database access, often considerably. 
There diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index c2a7a01280..df110b4ad6 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -180,10 +180,11 @@ class SharedMemoryModel(Model): super(SharedMemoryModel, cls).save(*args, **kwargs) else: # in another thread; make sure to save in reactor thread + print "saving in another thread!" def _save_callback(cls, *args, **kwargs): super(SharedMemoryModel, cls).save(*args, **kwargs) - blockingCallFromThread(reactor, _save_callback, cls, *args, **kwargs) - #callFromThread(_save_callback, cls, *args, **kwargs) + #blockingCallFromThread(reactor, _save_callback, cls, *args, **kwargs) + callFromThread(_save_callback, cls, *args, **kwargs) # Use a signal so we make sure to catch cascades. def flush_cache(**kwargs): From 70ea21dab89a1d06ef18e2b51c3855ac2a5ed7ab Mon Sep 17 00:00:00 2001 From: Griatch Date: Thu, 23 May 2013 23:57:21 +0200 Subject: [PATCH 06/12] Added settings for webserver threadpool limits. 
--- src/server/server.py | 23 ++++++++--------------- src/settings_default.py | 4 ++++ 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/src/server/server.py b/src/server/server.py index 1e33daf04a..5492bdecbc 100644 --- a/src/server/server.py +++ b/src/server/server.py @@ -348,27 +348,20 @@ if WEBSERVER_ENABLED: # start a thread pool and define the root url (/) as a wsgi resource # recognized by Django - threads = threadpool.ThreadPool() + threads = threadpool.ThreadPool(minthreads=max(1, settings.WEBSERVER_THREADPOOL_LIMITS[0]), + maxthreads=max(1, settings.WEBSERVER_THREADPOOL_LIMITS[1])) web_root = DjangoWebRoot(threads) # point our media resources to url /media web_root.putChild("media", static.File(settings.MEDIA_ROOT)) web_site = server.Site(web_root, logPath=settings.HTTP_LOG_FILE) - for interface in WEBSERVER_INTERFACES: - if ":" in interface: - print " iPv6 interfaces not yet supported" - continue - ifacestr = "" - if interface != '0.0.0.0' or len(WEBSERVER_INTERFACES) > 1: - ifacestr = "-%s" % interface - for proxyport, port in WEBSERVER_PORTS: - # create the webserver (we only need the port for this) - pstring = "%s:%s" % (ifacestr, port) - webserver = WSGIWebServer(threads, port, web_site, interface=interface) - webserver.setName('EvenniaWebServer%s' % pstring) - EVENNIA.services.addService(webserver) + for proxyport, serverport in WEBSERVER_PORTS: + # create the webserver (we only need the port for this) + webserver = WSGIWebServer(threads, serverport, web_site, interface='127.0.0.1') + webserver.setName('EvenniaWebServer%s' % serverport) + EVENNIA.services.addService(webserver) - print " webserver%s: %s" % (ifacestr, port) + print " webserver: %s" % serverport if IRC_ENABLED: diff --git a/src/settings_default.py b/src/settings_default.py index 13cc9e620e..745a8c4a62 100644 --- a/src/settings_default.py +++ b/src/settings_default.py @@ -55,6 +55,10 @@ WEBSERVER_INTERFACES = ['0.0.0.0'] # IP addresses that may talk to the server in a 
reverse proxy configuration, # like NginX. UPSTREAM_IPS = ['127.0.0.1'] +# The webserver uses threadpool for handling requests. This will scale +# with server load. Set the minimum and maximum number of threads it +# may use as (min, max) (must be > 0) +WEBSERVER_THREADPOOL_LIMITS = (1, 20) # Start the evennia ajax client on /webclient # (the webserver must also be running) WEBCLIENT_ENABLED = True From 0a39b8f65ebb0a3f4207fbdc414dc62f539a8ce1 Mon Sep 17 00:00:00 2001 From: Griatch Date: Fri, 24 May 2013 21:10:31 +0200 Subject: [PATCH 07/12] Webserver move to Server is finished and everything seems to work normally. --- src/objects/admin.py | 3 ++- src/utils/idmapper/base.py | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/objects/admin.py b/src/objects/admin.py index 81d3f96501..cf9a66449d 100644 --- a/src/objects/admin.py +++ b/src/objects/admin.py @@ -80,7 +80,7 @@ class ObjectDBAdmin(admin.ModelAdmin): ) #deactivated temporarily, they cause empty objects to be created in admin - inlines = [AliasInline]#, ObjAttributeInline] + #inlines = [AliasInline, ObjAttributeInline] # Custom modification to give two different forms wether adding or not. @@ -111,6 +111,7 @@ class ObjectDBAdmin(admin.ModelAdmin): return super(ObjectDBAdmin, self).get_form(request, obj, **defaults) def save_model(self, request, obj, form, change): + obj.save() if not change: # adding a new object obj = obj.typeclass diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index df110b4ad6..5e6032c60a 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -180,7 +180,6 @@ class SharedMemoryModel(Model): super(SharedMemoryModel, cls).save(*args, **kwargs) else: # in another thread; make sure to save in reactor thread - print "saving in another thread!" 
def _save_callback(cls, *args, **kwargs): super(SharedMemoryModel, cls).save(*args, **kwargs) #blockingCallFromThread(reactor, _save_callback, cls, *args, **kwargs) From deafb9c544799774aa2c56cee44b1bc321530e8f Mon Sep 17 00:00:00 2001 From: Griatch Date: Sat, 25 May 2013 10:20:26 +0200 Subject: [PATCH 08/12] Added cache-cleaning to the idmapper base class. This works to sync most changes from the admin site-side, but not location changes (which leaves the content cache stale). Thinking of testing the django in-built cache framework instead, maybe using custom fields? --- src/server/caches.py | 29 +++++++++++++++++++++++++---- src/utils/dbserialize.py | 1 + src/utils/idmapper/base.py | 2 ++ 3 files changed, 28 insertions(+), 4 deletions(-) diff --git a/src/server/caches.py b/src/server/caches.py index d311b19fb4..16f2fbd09f 100644 --- a/src/server/caches.py +++ b/src/server/caches.py @@ -6,6 +6,8 @@ Central caching module. from sys import getsizeof from collections import defaultdict from django.conf import settings +from src.server.models import ServerConfig +from src.utils.utils import to_str, uses_database _ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE @@ -13,6 +15,12 @@ _GA = object.__getattribute__ _SA = object.__setattr__ _DA = object.__delattr__ +if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4': + # mysql <5.6.4 don't support millisecond precision + _DATESTRING = "%Y:%m:%d-%H:%M:%S:000000" +else: + _DATESTRING = "%Y:%m:%d-%H:%M:%S:%f" + # OOB hooks (OOB not yet functional, don't use yet) _OOB_FIELD_UPDATE_HOOKS = defaultdict(dict) _OOB_PROP_UPDATE_HOOKS = defaultdict(dict) @@ -35,12 +43,12 @@ def hashid(obj): hid = _GA(obj, "_hashid") except AttributeError: try: - date, idnum = _GA(obj, "db_date_created"), _GA(obj, "id") + date, idnum = _GA(obj, "db_date_created").strftime(_DATESTRING), _GA(obj, "id") except AttributeError: try: # maybe a typeclass, try to go to dbobj obj = _GA(obj, "dbobj") - date, idnum = _GA(obj, 
"db_date_created"), _GA(obj, "id") + date, idnum = _GA(obj, "db_date_created").strftime(_DATESTRING), _GA(obj, "id") except AttributeError: # this happens if hashing something like ndb. We have to # rely on memory adressing in this case. @@ -194,7 +202,10 @@ if _ENABLE_LOCAL_CACHES: hid = hashid(obj) global _FIELD_CACHE if hid: - del _FIELD_CACHE[hashid(obj)] + try: + del _FIELD_CACHE[hashid(obj)] + except KeyError, e: + pass else: # clean cache completely _FIELD_CACHE = defaultdict(dict) @@ -241,7 +252,10 @@ if _ENABLE_LOCAL_CACHES: hid = hashid(obj) global _PROP_CACHE if hid: - del _PROP_CACHE[hashid(obj)] + try: + del _PROP_CACHE[hid] + except KeyError,e: + pass else: # clean cache completely _PROP_CACHE = defaultdict(dict) @@ -295,6 +309,13 @@ if _ENABLE_LOCAL_CACHES: _ATTR_CACHE = defaultdict(dict) + def flush_obj_caches(obj=None): + "Clean all caches on this object" + flush_field_cache(obj) + flush_prop_cache(obj) + flush_attr_cache(obj) + + else: # local caches disabled. Use simple pass-through replacements diff --git a/src/utils/dbserialize.py b/src/utils/dbserialize.py index 0c24908480..44c1d36a1d 100644 --- a/src/utils/dbserialize.py +++ b/src/utils/dbserialize.py @@ -28,6 +28,7 @@ except ImportError: from django.db import transaction from django.core.exceptions import ObjectDoesNotExist from django.contrib.contenttypes.models import ContentType +from src.server.models import ServerConfig from src.utils.utils import to_str, uses_database from src.utils import logger diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index 5e6032c60a..e70f59d8c7 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -210,6 +210,8 @@ def update_cached_instance(sender, instance, **kwargs): if not hasattr(instance, 'cache_instance'): return sender.cache_instance(instance) + from src.server.caches import flush_obj_caches + flush_obj_caches(instance) post_save.connect(update_cached_instance) def cache_size(mb=True): From 
8202dba5965d97609f665f2d26d2498f9e50ac80 Mon Sep 17 00:00:00 2001 From: Griatch Date: Wed, 29 May 2013 16:16:28 +0200 Subject: [PATCH 09/12] Changed cache system to use Django's cache mechanism. Changed field caches to make use of Django signalling instead of custom caching calls (this should make the system consistent also when called from the webserver). Created a wrapper system for easily wrapping fields with a default wrapper (so as to not have to explicitly define the properties (such as objdb.key) which all just do the same thing - load from the field and make sure to call save(). --- src/server/caches.py | 706 +++++++++++++++++++++---------------- src/server/server.py | 6 + src/settings_default.py | 25 +- src/typeclasses/models.py | 32 +- src/utils/idmapper/base.py | 55 ++- 5 files changed, 490 insertions(+), 334 deletions(-) diff --git a/src/server/caches.py b/src/server/caches.py index 16f2fbd09f..d5ad57d6b5 100644 --- a/src/server/caches.py +++ b/src/server/caches.py @@ -2,14 +2,8 @@ Central caching module. 
""" - -from sys import getsizeof -from collections import defaultdict -from django.conf import settings from src.server.models import ServerConfig -from src.utils.utils import to_str, uses_database - -_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE +from src.utils.utils import uses_database, to_str _GA = object.__getattribute__ _SA = object.__setattr__ @@ -21,16 +15,8 @@ if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6 else: _DATESTRING = "%Y:%m:%d-%H:%M:%S:%f" -# OOB hooks (OOB not yet functional, don't use yet) -_OOB_FIELD_UPDATE_HOOKS = defaultdict(dict) -_OOB_PROP_UPDATE_HOOKS = defaultdict(dict) -_OOB_ATTR_UPDATE_HOOKS = defaultdict(dict) -_OOB_NDB_UPDATE_HOOKS = defaultdict(dict) -_OOB_CUSTOM_UPDATE_HOOKS = defaultdict(dict) -_OOB_HANDLER = None # set by oob handler when it initializes - -def hashid(obj): +def hashid(obj, suffix=""): """ Returns a per-class unique that combines the object's class name with its idnum and creation time. This makes this id unique also @@ -57,301 +43,425 @@ def hashid(obj): # this will happen if setting properties on an object which is not yet saved return None # build the hashid - hid = "%s-%s-#%s" % (_GA(obj, "__class__"), date, idnum) + hid = "%s-%s-#%s%s" % (_GA(obj, "__class__"), date, idnum, suffix) + hid = hid.replace(" ", "") _SA(obj, "_hashid", hid) - return hid + return to_str(hid) -# oob helper functions -def register_oob_update_hook(obj,name, entity="field"): + +# signal handlers + +from django.core.cache import get_cache +#from django.db.models.signals import pre_save, pre_delete, post_init + +# field cache + +_FIELD_CACHE = get_cache("field_cache") +if not _FIELD_CACHE: + raise RuntimeError("settings.CACHE does not contain a 'field_cache' entry!") + +# callback before saving an object + +def field_pre_save(sender, **kwargs): """ - Register hook function to be called when field/property/db/ndb is updated. 
- Given function will be called with function(obj, entityname, newvalue, *args, **kwargs) - entity - one of "field", "property", "db", "ndb" or "custom" + Called at the beginning of the save operation. The save method + must be called with the update_fields keyword in order to """ - hid = hashid(obj) - if hid: - if entity == "field": - global _OOB_FIELD_UPDATE_HOOKS - _OOB_FIELD_UPDATE_HOOKS[hid][name] = True - return - elif entity == "property": - global _OOB_PROP_UPDATE_HOOKS - _OOB_PROP_UPDATE_HOOKS[hid][name] = True - elif entity == "db": - global _OOB_ATTR_UPDATE_HOOKS - _OOB_ATTR_UPDATE_HOOKS[hid][name] = True - elif entity == "ndb": - global _OOB_NDB_UPDATE_HOOKS - _OOB_NDB_UPDATE_HOOKS[hid][name] = True - elif entity == "custom": - global _OOB_CUSTOM_UPDATE_HOOKS - _OOB_CUSTOM_UPDATE_HOOKS[hid][name] = True - else: - return None + global _FIELD_CACHE -def unregister_oob_update_hook(obj, name, entity="property"): - """ - Un-register a report hook - """ - hid = hashid(obj) - if hid: - global _OOB_FIELD_UPDATE_HOOKS,_OOB_PROP_UPDATE_HOOKS, _OOB_ATTR_UPDATE_HOOKS - global _OOB_CUSTOM_UPDATE_HOOKS, _OOB_NDB_UPDATE_HOOKS - if entity == "field" and name in _OOB_FIELD_UPDATE_HOOKS: - del _OOB_FIELD_UPDATE_HOOKS[hid][name] - elif entity == "property" and name in _OOB_PROP_UPDATE_HOOKS: - del _OOB_PROP_UPDATE_HOOKS[hid][name] - elif entity == "db" and name in _OOB_ATTR_UPDATE_HOOKS: - del _OOB_ATTR_UPDATE_HOOKS[hid][name] - elif entity == "ndb" and name in _OOB_NDB_UPDATE_HOOKS: - del _OOB_NDB_UPDATE_HOOKS[hid][name] - elif entity == "custom" and name in _OOB_CUSTOM_UPDATE_HOOKS: - del _OOB_CUSTOM_UPDATE_HOOKS[hid][name] - else: - return None - -def call_ndb_hooks(obj, attrname, value): - """ - No caching is done of ndb here, but - we use this as a way to call OOB hooks. 
- """ - hid = hashid(obj) - if hid: - oob_hook = _OOB_NDB_UPDATE_HOOKS[hid].get(attrname) - if oob_hook: - oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2]) - -def call_custom_hooks(obj, attrname, value): - """ - Custom handler for developers adding their own oob hooks, e.g. to - custom typeclass properties. - """ - hid = hashid(obj) - if hid: - oob_hook = _OOB_CUSTOM_UPDATE_HOOKS[hid].get(attrname) - if oob_hook: - oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2]) - - -if _ENABLE_LOCAL_CACHES: - - # Cache stores - _ATTR_CACHE = defaultdict(dict) - _FIELD_CACHE = defaultdict(dict) - _PROP_CACHE = defaultdict(dict) - - - def get_cache_sizes(): - """ - Get cache sizes, expressed in number of objects and memory size in MB - """ - global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE - - attr_n = sum(len(dic) for dic in _ATTR_CACHE.values()) - attr_mb = sum(sum(getsizeof(obj) for obj in dic.values()) for dic in _ATTR_CACHE.values()) / 1024.0 - - field_n = sum(len(dic) for dic in _FIELD_CACHE.values()) - field_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0 - - prop_n = sum(len(dic) for dic in _PROP_CACHE.values()) - prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0 - - return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb) - - # on-object database field cache - def get_field_cache(obj, name): - "On-model Cache handler." - global _FIELD_CACHE - hid = hashid(obj) - if hid: - try: - return _FIELD_CACHE[hid][name] - except KeyError: - val = _GA(obj, "db_%s" % name) - _FIELD_CACHE[hid][name] = val - return val - return _GA(obj, "db_%s" % name) - - def set_field_cache(obj, name, val): - "On-model Cache setter. Also updates database." 
- _SA(obj, "db_%s" % name, val) - _GA(obj, "save")() - hid = hashid(obj) - if hid: - global _FIELD_CACHE - _FIELD_CACHE[hid][name] = val - # oob hook functionality - if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): - _OOB_HANDLER.update(hid, name, val) - - def del_field_cache(obj, name): - "On-model cache deleter" - hid = hashid(obj) - _SA(obj, "db_%s" % name, None) - _GA(obj, "save")() - if hid: - try: - del _FIELD_CACHE[hid][name] - except KeyError: - pass - if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): - _OOB_HANDLER.update(hid, name, None) - - def flush_field_cache(obj=None): - "On-model cache resetter" - hid = hashid(obj) - global _FIELD_CACHE - if hid: - try: - del _FIELD_CACHE[hashid(obj)] - except KeyError, e: - pass - else: - # clean cache completely - _FIELD_CACHE = defaultdict(dict) - - # on-object property cache (unrelated to database) - # Note that the get/set_prop_cache handler do not actually - # get/set the property "on" the object but only reads the - # value to/from the cache. This is intended to be used - # with a get/setter property on the object. - - def get_prop_cache(obj, name, default=None): - "On-model Cache handler." - global _PROP_CACHE - hid = hashid(obj) - if hid: - try: - val = _PROP_CACHE[hid][name] - except KeyError: - return default - _PROP_CACHE[hid][name] = val - return val - return default - - def set_prop_cache(obj, name, val): - "On-model Cache setter. Also updates database." - hid = hashid(obj) - if hid: - global _PROP_CACHE - _PROP_CACHE[hid][name] = val - # oob hook functionality - oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name) - if oob_hook: - oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2]) - - - def del_prop_cache(obj, name): - "On-model cache deleter" + if kwargs.pop("raw", False): return + instance = kwargs.pop("instance") + fields = kwargs.pop("update_fields", None) + if fields: + # this is a list of strings at this point. 
We want field objects + fields = (instance._meta.get_field_by_name(field)[0] for field in fields) + else: + # meta.fields are already field objects + fields = instance._meta.fields + for field in fields: + fieldname = field.name + new_value = field.value_from_object(instance) + handlername = "_%s_handler" % fieldname try: - del _PROP_CACHE[hashid(obj)][name] - except KeyError: - pass - def flush_prop_cache(obj=None): - "On-model cache resetter" - hid = hashid(obj) - global _PROP_CACHE + handler = _GA(instance, handlername) + except AttributeError: + handler = None + hid = hashid(instance, "-%s" % fieldname) + if callable(handler): + old_value = _FIELD_CACHE.get(hid) if hid else None + # the handler may modify the stored value in various ways + # don't catch exceptions, the handler must work! + new_value = handler(instance, new_value, oldval=old_value) + # we re-assign this to the field, save() will pick it up from there + _SA(instance, fieldname, new_value) if hid: - try: - del _PROP_CACHE[hid] - except KeyError,e: - pass - else: - # clean cache completely - _PROP_CACHE = defaultdict(dict) + # update cache + _FIELD_CACHE.set(hid, new_value) - # attribute cache +# goes into server: +#pre_save.connect(field_pre_save, dispatch_uid="fieldcache") - def get_attr_cache(obj, attrname): - """ - Attribute cache store - """ - return _ATTR_CACHE[hashid(obj)].get(attrname, None) - - def set_attr_cache(obj, attrname, attrobj): - """ - Cache an attribute object - """ - hid = hashid(obj) - if hid: - global _ATTR_CACHE - _ATTR_CACHE[hid][attrname] = attrobj - # oob hook functionality - oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname) - if oob_hook: - oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2]) - - def del_attr_cache(obj, attrname): - """ - Remove attribute from cache - """ - global _ATTR_CACHE - try: - _ATTR_CACHE[hashid(obj)][attrname].no_cache = True - del _ATTR_CACHE[hashid(obj)][attrname] - except KeyError: - pass - - def 
flush_attr_cache(obj=None): - """ - Flush the attribute cache for this object. - """ - global _ATTR_CACHE - if obj: - for attrobj in _ATTR_CACHE[hashid(obj)].values(): - attrobj.no_cache = True - del _ATTR_CACHE[hashid(obj)] - else: - # clean cache completely - for objcache in _ATTR_CACHE.values(): - for attrobj in objcache.values(): - attrobj.no_cache = True - _ATTR_CACHE = defaultdict(dict) +## attr cache - caching the attribute objects related to a given object to +## avoid lookups more than necessary (this makes attributes en par in speed +## to any property). The signal is triggered by the Attribute itself when it +## is created or deleted (it holds a reference to the object) +# +#_ATTR_CACHE = get_cache("attr_cache") +#if not _ATTR_CACHE: +# raise RuntimeError("settings.CACHE does not contain an 'attr_cache' entry!") +# +#def attr_post_init(sender, **kwargs): +# "Called when attribute is created or retrieved in connection with obj." +# hid = hashid(sender.db_obj, "-%s" % sender.db_key) +# _ATTR_CACHE.set(hid, sender) +#def attr_pre_delete(sender, **kwargs): +# "Called when attribute is deleted (del_attribute)" +# hid = hashid(sender.db_obj, "-%s" % sender.db_key) +# _ATTR_CACHE.delete(hid) +# +### goes into server: +#from src.objects.models import ObjAttribute +#from src.scripts.models import ScriptAttribute +#from src.players.models import PlayerAttribute +#post_init.connect(attr_post_init, sender=ObjAttribute, dispatch_uid="objattrcache") +#post_init.connect(attr_post_init, sender=ScriptAttribute, dispatch_uid="scriptattrcache") +#post_init.connect(attr_post_init, sender=PlayerAttribute, dispatch_uid="playerattrcache") +#pre_delete.connect(attr_pre_delete, sender=ObjAttribute, dispatch_uid="objattrcache") +#pre_delete.connect(attr_pre_delete, sender=ScriptAttribute, dispatch_uid="scriptattrcache") +#pre_delete.connect(attr_pre_delete, sender=PlayerAttribute, dispatch_uid="playerattrcache") +# +# +## property cache - this doubles as a central cache and as a 
way +## to trigger oob on such changes. +# +#from django.dispatch import Signal +#_PROP_CACHE = get_cache("prop_cache") +#if not _PROP_CACHE: +# raise RuntimeError("settings.CACHE does not contain a 'prop_cache' entry!") +# +#PROP_POST_UPDATE = Signal(providing_args=["propname", "propvalue"]) +# +#def prop_update(sender, **kwargs): +# "Called when a propery is updated. kwargs are propname and propvalue." +# propname, propvalue = kwargs.pop("propname", None), kwargs.pop("propvalue", None) +# if propname == None: return +# hid = hashid(sender, "-%s" % propname) +# _PROP_CACHE.set(hid, propvalue) +# +#PROP_POST_UPDATE.connect(prop_update, dispatch_uid="propcache") +# +# - def flush_obj_caches(obj=None): - "Clean all caches on this object" - flush_field_cache(obj) - flush_prop_cache(obj) - flush_attr_cache(obj) -else: + + + + + +#_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE +## oob helper functions +# OOB hooks (OOB not yet functional, don't use yet) +#_OOB_FIELD_UPDATE_HOOKS = defaultdict(dict) +#_OOB_PROP_UPDATE_HOOKS = defaultdict(dict) +#_OOB_ATTR_UPDATE_HOOKS = defaultdict(dict) +#_OOB_NDB_UPDATE_HOOKS = defaultdict(dict) +#_OOB_CUSTOM_UPDATE_HOOKS = defaultdict(dict) +# +#_OOB_HANDLER = None # set by oob handler when it initializes +#def register_oob_update_hook(obj,name, entity="field"): +# """ +# Register hook function to be called when field/property/db/ndb is updated. 
+# Given function will be called with function(obj, entityname, newvalue, *args, **kwargs) +# entity - one of "field", "property", "db", "ndb" or "custom" +# """ +# hid = hashid(obj) +# if hid: +# if entity == "field": +# global _OOB_FIELD_UPDATE_HOOKS +# _OOB_FIELD_UPDATE_HOOKS[hid][name] = True +# return +# elif entity == "property": +# global _OOB_PROP_UPDATE_HOOKS +# _OOB_PROP_UPDATE_HOOKS[hid][name] = True +# elif entity == "db": +# global _OOB_ATTR_UPDATE_HOOKS +# _OOB_ATTR_UPDATE_HOOKS[hid][name] = True +# elif entity == "ndb": +# global _OOB_NDB_UPDATE_HOOKS +# _OOB_NDB_UPDATE_HOOKS[hid][name] = True +# elif entity == "custom": +# global _OOB_CUSTOM_UPDATE_HOOKS +# _OOB_CUSTOM_UPDATE_HOOKS[hid][name] = True +# else: +# return None +# +#def unregister_oob_update_hook(obj, name, entity="property"): +# """ +# Un-register a report hook +# """ +# hid = hashid(obj) +# if hid: +# global _OOB_FIELD_UPDATE_HOOKS,_OOB_PROP_UPDATE_HOOKS, _OOB_ATTR_UPDATE_HOOKS +# global _OOB_CUSTOM_UPDATE_HOOKS, _OOB_NDB_UPDATE_HOOKS +# if entity == "field" and name in _OOB_FIELD_UPDATE_HOOKS: +# del _OOB_FIELD_UPDATE_HOOKS[hid][name] +# elif entity == "property" and name in _OOB_PROP_UPDATE_HOOKS: +# del _OOB_PROP_UPDATE_HOOKS[hid][name] +# elif entity == "db" and name in _OOB_ATTR_UPDATE_HOOKS: +# del _OOB_ATTR_UPDATE_HOOKS[hid][name] +# elif entity == "ndb" and name in _OOB_NDB_UPDATE_HOOKS: +# del _OOB_NDB_UPDATE_HOOKS[hid][name] +# elif entity == "custom" and name in _OOB_CUSTOM_UPDATE_HOOKS: +# del _OOB_CUSTOM_UPDATE_HOOKS[hid][name] +# else: +# return None +# +#def call_ndb_hooks(obj, attrname, value): +# """ +# No caching is done of ndb here, but +# we use this as a way to call OOB hooks. 
+# """ +# hid = hashid(obj) +# if hid: +# oob_hook = _OOB_NDB_UPDATE_HOOKS[hid].get(attrname) +# if oob_hook: +# oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2]) +# +#def call_custom_hooks(obj, attrname, value): +# """ +# Custom handler for developers adding their own oob hooks, e.g. to +# custom typeclass properties. +# """ +# hid = hashid(obj) +# if hid: +# oob_hook = _OOB_CUSTOM_UPDATE_HOOKS[hid].get(attrname) +# if oob_hook: +# oob_hook[0](obj.typeclass, attrname, value, *oob_hook[1], **oob_hook[2]) +# +# + +# # old cache system +# +# if _ENABLE_LOCAL_CACHES: +# # Cache stores +# _ATTR_CACHE = defaultdict(dict) +# _FIELD_CACHE = defaultdict(dict) +# _PROP_CACHE = defaultdict(dict) +# +# +# def get_cache_sizes(): +# """ +# Get cache sizes, expressed in number of objects and memory size in MB +# """ +# global _ATTR_CACHE, _FIELD_CACHE, _PROP_CACHE +# +# attr_n = sum(len(dic) for dic in _ATTR_CACHE.values()) +# attr_mb = sum(sum(getsizeof(obj) for obj in dic.values()) for dic in _ATTR_CACHE.values()) / 1024.0 +# +# field_n = sum(len(dic) for dic in _FIELD_CACHE.values()) +# field_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0 +# +# prop_n = sum(len(dic) for dic in _PROP_CACHE.values()) +# prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0 +# +# return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb) +# +# # on-object database field cache +# def get_field_cache(obj, name): +# "On-model Cache handler." +# global _FIELD_CACHE +# hid = hashid(obj) +# if hid: +# try: +# return _FIELD_CACHE[hid][name] +# except KeyError: +# val = _GA(obj, "db_%s" % name) +# _FIELD_CACHE[hid][name] = val +# return val +# return _GA(obj, "db_%s" % name) +# +# def set_field_cache(obj, name, val): +# "On-model Cache setter. Also updates database." 
+# _SA(obj, "db_%s" % name, val) +# _GA(obj, "save")() +# hid = hashid(obj) +# if hid: +# global _FIELD_CACHE +# _FIELD_CACHE[hid][name] = val +# # oob hook functionality +# if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): +# _OOB_HANDLER.update(hid, name, val) +# +# def del_field_cache(obj, name): +# "On-model cache deleter" +# hid = hashid(obj) +# _SA(obj, "db_%s" % name, None) +# _GA(obj, "save")() +# if hid: +# try: +# del _FIELD_CACHE[hid][name] +# except KeyError: +# pass +# if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): +# _OOB_HANDLER.update(hid, name, None) +# +# def flush_field_cache(obj=None): +# "On-model cache resetter" +# hid = hashid(obj) +# global _FIELD_CACHE +# if hid: +# try: +# del _FIELD_CACHE[hashid(obj)] +# except KeyError, e: +# pass +# else: +# # clean cache completely +# _FIELD_CACHE = defaultdict(dict) +# +# # on-object property cache (unrelated to database) +# # Note that the get/set_prop_cache handler do not actually +# # get/set the property "on" the object but only reads the +# # value to/from the cache. This is intended to be used +# # with a get/setter property on the object. +# +# def get_prop_cache(obj, name, default=None): +# "On-model Cache handler." +# global _PROP_CACHE +# hid = hashid(obj) +# if hid: +# try: +# val = _PROP_CACHE[hid][name] +# except KeyError: +# return default +# _PROP_CACHE[hid][name] = val +# return val +# return default +# +# def set_prop_cache(obj, name, val): +# "On-model Cache setter. Also updates database." 
+# hid = hashid(obj) +# if hid: +# global _PROP_CACHE +# _PROP_CACHE[hid][name] = val +# # oob hook functionality +# oob_hook = _OOB_PROP_UPDATE_HOOKS[hid].get(name) +# if oob_hook: +# oob_hook[0](obj.typeclass, name, val, *oob_hook[1], **oob_hook[2]) +# +# +# def del_prop_cache(obj, name): +# "On-model cache deleter" +# try: +# del _PROP_CACHE[hashid(obj)][name] +# except KeyError: +# pass +# def flush_prop_cache(obj=None): +# "On-model cache resetter" +# hid = hashid(obj) +# global _PROP_CACHE +# if hid: +# try: +# del _PROP_CACHE[hid] +# except KeyError,e: +# pass +# else: +# # clean cache completely +# _PROP_CACHE = defaultdict(dict) +# +# # attribute cache +# +# def get_attr_cache(obj, attrname): +# """ +# Attribute cache store +# """ +# return _ATTR_CACHE[hashid(obj)].get(attrname, None) +# +# def set_attr_cache(obj, attrname, attrobj): +# """ +# Cache an attribute object +# """ +# hid = hashid(obj) +# if hid: +# global _ATTR_CACHE +# _ATTR_CACHE[hid][attrname] = attrobj +# # oob hook functionality +# oob_hook = _OOB_ATTR_UPDATE_HOOKS[hid].get(attrname) +# if oob_hook: +# oob_hook[0](obj.typeclass, attrname, attrobj.value, *oob_hook[1], **oob_hook[2]) +# +# def del_attr_cache(obj, attrname): +# """ +# Remove attribute from cache +# """ +# global _ATTR_CACHE +# try: +# _ATTR_CACHE[hashid(obj)][attrname].no_cache = True +# del _ATTR_CACHE[hashid(obj)][attrname] +# except KeyError: +# pass +# +# def flush_attr_cache(obj=None): +# """ +# Flush the attribute cache for this object. 
+# """ +# global _ATTR_CACHE +# if obj: +# for attrobj in _ATTR_CACHE[hashid(obj)].values(): +# attrobj.no_cache = True +# del _ATTR_CACHE[hashid(obj)] +# else: +# # clean cache completely +# for objcache in _ATTR_CACHE.values(): +# for attrobj in objcache.values(): +# attrobj.no_cache = True +# _ATTR_CACHE = defaultdict(dict) +# +# +# def flush_obj_caches(obj=None): +# "Clean all caches on this object" +# flush_field_cache(obj) +# flush_prop_cache(obj) +# flush_attr_cache(obj) +# + +#else: # local caches disabled. Use simple pass-through replacements - def get_cache_sizes(): - return (0, 0), (0, 0), (0, 0) - def get_field_cache(obj, name): - return _GA(obj, "db_%s" % name) - def set_field_cache(obj, name, val): - _SA(obj, "db_%s" % name, val) - _GA(obj, "save")() - hid = hashid(obj) - if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): - _OOB_HANDLER.update(hid, name, val) - def del_field_cache(obj, name): - _SA(obj, "db_%s" % name, None) - _GA(obj, "save")() - hid = hashid(obj) - if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): - _OOB_HANDLER.update(hid, name, None) - def flush_field_cache(obj=None): - pass - # these should get oob handlers when oob is implemented. 
- def get_prop_cache(obj, name, default=None): - return None - def set_prop_cache(obj, name, val): - pass - def del_prop_cache(obj, name): - pass - def flush_prop_cache(obj=None): - pass - def get_attr_cache(obj, attrname): - return None - def set_attr_cache(obj, attrname, attrobj): - pass - def del_attr_cache(obj, attrname): - pass - def flush_attr_cache(obj=None): - pass +def get_cache_sizes(): + return (0, 0), (0, 0), (0, 0) +def get_field_cache(obj, name): + return _GA(obj, "db_%s" % name) +def set_field_cache(obj, name, val): + _SA(obj, "db_%s" % name, val) + _GA(obj, "save")() + #hid = hashid(obj) + #if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): + # _OOB_HANDLER.update(hid, name, val) +def del_field_cache(obj, name): + _SA(obj, "db_%s" % name, None) + _GA(obj, "save")() + #hid = hashid(obj) + #if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): + # _OOB_HANDLER.update(hid, name, None) +def flush_field_cache(obj=None): + pass +# these should get oob handlers when oob is implemented. +def get_prop_cache(obj, name, default=None): + return None +def set_prop_cache(obj, name, val): + pass +def del_prop_cache(obj, name): + pass +def flush_prop_cache(obj=None): + pass +def get_attr_cache(obj, attrname): + return None +def set_attr_cache(obj, attrname, attrobj): + pass +def del_attr_cache(obj, attrname): + pass +def flush_attr_cache(obj=None): + pass diff --git a/src/server/server.py b/src/server/server.py index 5492bdecbc..7cc7e685a9 100644 --- a/src/server/server.py +++ b/src/server/server.py @@ -30,6 +30,12 @@ from src.utils.utils import get_evennia_version, mod_import, make_iter from src.comms import channelhandler from src.server.sessionhandler import SESSIONS +# setting up server-side field cache + +from django.db.models.signals import pre_save +from src.server.caches import field_pre_save +pre_save.connect(field_pre_save, dispatch_uid="fieldcache") + _SA = object.__setattr__ if os.name == 'nt': diff --git a/src/settings_default.py b/src/settings_default.py index 
745a8c4a62..7237909668 100644 --- a/src/settings_default.py +++ b/src/settings_default.py @@ -159,14 +159,23 @@ DATABASES = { 'HOST':'', 'PORT':'' }} -# Engine Config style for Django versions < 1.2 only. See above. -DATABASE_ENGINE = 'sqlite3' -DATABASE_NAME = os.path.join(GAME_DIR, 'evennia.db3') -DATABASE_USER = '' -DATABASE_PASSWORD = '' -DATABASE_HOST = '' -DATABASE_PORT = '' - +# This manages the object-level caches. Evennia will agressively cache +# fields, properties and attribute lookup. Evennia uses a fast and +# local in-memory cache by default. If a Memcached server is available +# it can be used instead (see django docs). Cache performance can be +# tweaked by adding options to each cache. Finally, any cache can +# be completely turned off by pointing its backend +# to 'django.core.cache.backends.dummy.DummyCache'. +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, + 'field_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, + 'prop_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, + 'attr_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, + } ###################################################################### # Evennia pluggable modules ###################################################################### diff --git a/src/typeclasses/models.py b/src/typeclasses/models.py index 931b0e812e..745bc98b01 100644 --- a/src/typeclasses/models.py +++ b/src/typeclasses/models.py @@ -38,11 +38,12 @@ from django.db import models, IntegrityError from django.conf import settings from django.utils.encoding import smart_str from django.contrib.contenttypes.models import ContentType +from django.db.models.fields import AutoField, FieldDoesNotExist from src.utils.idmapper.models import SharedMemoryModel from src.server.caches import get_field_cache, set_field_cache, del_field_cache from src.server.caches import get_attr_cache, set_attr_cache, del_attr_cache from 
src.server.caches import get_prop_cache, set_prop_cache, del_prop_cache, flush_attr_cache -from src.server.caches import call_ndb_hooks +#from src.server.caches import call_ndb_hooks from src.server.models import ServerConfig from src.typeclasses import managers from src.locks.lockhandler import LockHandler @@ -111,7 +112,7 @@ class Attribute(SharedMemoryModel): # Lock storage db_lock_storage = models.TextField('locks', blank=True) # references the object the attribute is linked to (this is set - # by each child class to this abstact class) + # by each child class to this abstract class) db_obj = None # models.ForeignKey("RefencedObject") # time stamp db_date_created = models.DateTimeField('date_created', editable=False, auto_now_add=True) @@ -455,20 +456,21 @@ class TypedObject(SharedMemoryModel): # value = self.attr and del self.attr respectively (where self # is the object in question). + # key property (wraps db_key) #@property - def __key_get(self): - "Getter. Allows for value = self.key" - return get_field_cache(self, "key") - #@key.setter - def __key_set(self, value): - "Setter. Allows for self.key = value" - set_field_cache(self, "key", value) - #@key.deleter - def __key_del(self): - "Deleter. Allows for del self.key" - raise Exception("Cannot delete objectdb key!") - key = property(__key_get, __key_set, __key_del) + #def __key_get(self): + # "Getter. Allows for value = self.key" + # return get_field_cache(self, "key") + ##@key.setter + #def __key_set(self, value): + # "Setter. Allows for self.key = value" + # set_field_cache(self, "key", value) + ##@key.deleter + #def __key_del(self): + # "Deleter. 
Allows for del self.key" + # raise Exception("Cannot delete objectdb key!") + #key = property(__key_get, __key_set, __key_del) # name property (wraps db_key too - alias to self.key) #@property @@ -1244,7 +1246,7 @@ class TypedObject(SharedMemoryModel): return None def __setattr__(self, key, value): # hook the oob handler here - call_ndb_hooks(self, key, value) + #call_ndb_hooks(self, key, value) _SA(self, key, value) self._ndb_holder = NdbHolder() return self._ndb_holder diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index e70f59d8c7..b50598c5a0 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -8,14 +8,19 @@ Also adds cache_size() for monitoring the size of the cache. """ import os, threading -from twisted.internet import reactor +#from twisted.internet import reactor +#from twisted.internet.threads import blockingCallFromThread from twisted.internet.reactor import callFromThread -from twisted.internet.threads import blockingCallFromThread from django.db.models.base import Model, ModelBase from django.db.models.signals import post_save, pre_delete, post_syncdb from manager import SharedMemoryManager +_GA = object.__getattribute__ +_SA = object.__setattr__ +_DA = object.__delattr__ + + # determine if our current pid is different from the server PID (i.e. # if we are in a subprocess or not) from src import PROC_MODIFIED_OBJS @@ -78,13 +83,39 @@ class SharedMemoryModelBase(ModelBase): if cached_instance is None: cached_instance = new_instance() cls.cache_instance(cached_instance) - return cached_instance + def _prepare(cls): cls.__instance_cache__ = {} #WeakValueDictionary() super(SharedMemoryModelBase, cls)._prepare() + def __init__(cls, *args, **kwargs): + "Takes field names db_* and creates property wrappers named without the db_ prefix. 
So db_key -> key" + super(SharedMemoryModelBase, cls).__init__(*args, **kwargs) + def create_wrapper(cls, fieldname, wrappername): + "Helper method to create property wrappers with unique names (must be in separate call)" + def _get(cls, fname): + return _GA(cls, fname) + def _set(cls, fname, value): + _SA(cls, fname, value) + _GA(cls, "save")(update_fields=[fname]) # important! + def _del(cls, fname): + raise RuntimeError("You cannot delete field %s on %s; set it to None instead." % (fname, cls)) + type(cls).__setattr__(cls, wrappername, property(lambda cls: _get(cls, fieldname), + lambda cls,val: _set(cls, fieldname, val), + lambda cls: _del(cls, fieldname))) + # eclude some models that should not auto-create wrapper fields + if cls.__name__ in ("ServerConfig", "TypeNick"): + return + # dynamically create the properties + for field in cls._meta.fields: + fieldname = field.name + wrappername = fieldname == "id" and "dbref" or fieldname.replace("db_", "") + if not hasattr(cls, wrappername): + # make sure not to overload manually created wrappers on the model + print "wrapping %s -> %s" % (fieldname, wrappername) + create_wrapper(cls, fieldname, wrappername) class SharedMemoryModel(Model): # CL: setting abstract correctly to allow subclasses to inherit the default @@ -126,6 +157,13 @@ class SharedMemoryModel(Model): return result _get_cache_key = classmethod(_get_cache_key) + def _flush_cached_by_key(cls, key): + try: + del cls.__instance_cache__[key] + except KeyError: + pass + _flush_cached_by_key = classmethod(_flush_cached_by_key) + def get_cached_instance(cls, id): """ Method to retrieve a cached instance by pk value. 
Returns None when not found @@ -148,13 +186,6 @@ class SharedMemoryModel(Model): return cls.__instance_cache__.values() get_all_cached_instances = classmethod(get_all_cached_instances) - def _flush_cached_by_key(cls, key): - try: - del cls.__instance_cache__[key] - except KeyError: - pass - _flush_cached_by_key = classmethod(_flush_cached_by_key) - def flush_cached_instance(cls, instance): """ Method to flush an instance from the cache. The instance will always be flushed from the cache, @@ -168,7 +199,7 @@ class SharedMemoryModel(Model): flush_instance_cache = classmethod(flush_instance_cache) def save(cls, *args, **kwargs): - "save tracking process/thread issues" + "save method tracking process/thread issues" if _IS_SUBPROCESS: # we keep a store of objects modified in subprocesses so @@ -210,8 +241,6 @@ def update_cached_instance(sender, instance, **kwargs): if not hasattr(instance, 'cache_instance'): return sender.cache_instance(instance) - from src.server.caches import flush_obj_caches - flush_obj_caches(instance) post_save.connect(update_cached_instance) def cache_size(mb=True): From b6383ddab92acbc32b7cbfbe199504f33f133fe6 Mon Sep 17 00:00:00 2001 From: Griatch Date: Wed, 29 May 2013 18:47:51 +0200 Subject: [PATCH 10/12] Moved attr_cache to new caching system, activated all attribute updating signals. --- src/objects/models.py | 7 +++ src/players/models.py | 6 ++ src/scripts/models.py | 6 ++ src/server/caches.py | 115 +++++++++++++++++++------------------ src/typeclasses/models.py | 18 +++--- src/utils/idmapper/base.py | 9 ++- 6 files changed, 95 insertions(+), 66 deletions(-) diff --git a/src/objects/models.py b/src/objects/models.py index a063b256ec..4406902428 100644 --- a/src/objects/models.py +++ b/src/objects/models.py @@ -17,11 +17,13 @@ transparently through the decorating TypeClass. 
import traceback from django.db import models from django.conf import settings +from django.db.models.signals import post_init, pre_delete from src.utils.idmapper.models import SharedMemoryModel from src.typeclasses.models import Attribute, TypedObject, TypeNick, TypeNickHandler from src.server.caches import get_field_cache, set_field_cache, del_field_cache from src.server.caches import get_prop_cache, set_prop_cache, del_prop_cache +from src.server.caches import attr_post_init, attr_pre_delete from src.typeclasses.typeclass import TypeClass from src.players.models import PlayerNick from src.objects.manager import ObjectManager @@ -53,6 +55,7 @@ _HERE = _("here") # #------------------------------------------------------------ + class ObjAttribute(Attribute): "Attributes for ObjectDB objects." db_obj = models.ForeignKey("ObjectDB") @@ -62,6 +65,10 @@ class ObjAttribute(Attribute): verbose_name = "Object Attribute" verbose_name_plural = "Object Attributes" +# attach the cache handlers for attribute lookup +post_init.connect(attr_post_init, sender=ObjAttribute, dispatch_uid="objattrcache") +pre_delete.connect(attr_pre_delete, sender=ObjAttribute, dispatch_uid="objattrcache") + #------------------------------------------------------------ # # Alias diff --git a/src/players/models.py b/src/players/models.py index 278ab00a14..37e639af18 100644 --- a/src/players/models.py +++ b/src/players/models.py @@ -27,9 +27,12 @@ from django.conf import settings from django.db import models from django.contrib.auth.models import User from django.utils.encoding import smart_str +from django.db.models.signals import post_init, pre_delete from src.server.caches import get_field_cache, set_field_cache, del_field_cache from src.server.caches import get_prop_cache, set_prop_cache, del_prop_cache +from src.server.caches import attr_post_init, attr_pre_delete + from src.players import manager from src.scripts.models import ScriptDB from src.typeclasses.models import Attribute, TypedObject, 
TypeNick, TypeNickHandler @@ -74,6 +77,9 @@ class PlayerAttribute(Attribute): "Define Django meta options" verbose_name = "Player Attribute" +post_init.connect(attr_post_init, sender=PlayerAttribute, dispatch_uid="playerattrcache") +pre_delete.connect(attr_pre_delete, sender=PlayerAttribute, dispatch_uid="playerattrcache") + #------------------------------------------------------------ # # Player Nicks diff --git a/src/scripts/models.py b/src/scripts/models.py index 538866bca9..ae79794a9a 100644 --- a/src/scripts/models.py +++ b/src/scripts/models.py @@ -26,6 +26,9 @@ Common examples of uses of Scripts: """ from django.conf import settings from django.db import models +from django.db.models.signals import post_init, pre_delete + +from src.server.caches import attr_post_init, attr_pre_delete from src.typeclasses.models import Attribute, TypedObject from django.contrib.contenttypes.models import ContentType from src.scripts.manager import ScriptManager @@ -47,6 +50,9 @@ class ScriptAttribute(Attribute): verbose_name = "Script Attribute" verbose_name_plural = "Script Attributes" +# attach cache handlers for attribute lookup +post_init.connect(attr_post_init, sender=ScriptAttribute, dispatch_uid="scriptattrcache") +pre_delete.connect(attr_pre_delete, sender=ScriptAttribute, dispatch_uid="scriptattrcache") #------------------------------------------------------------ # diff --git a/src/server/caches.py b/src/server/caches.py index d5ad57d6b5..769cbc28b1 100644 --- a/src/server/caches.py +++ b/src/server/caches.py @@ -2,6 +2,8 @@ Central caching module. 
""" +from django.core.cache import get_cache +#from django.db.models.signals import pre_save, pre_delete, post_init from src.server.models import ServerConfig from src.utils.utils import uses_database, to_str @@ -9,13 +11,27 @@ _GA = object.__getattribute__ _SA = object.__setattr__ _DA = object.__delattr__ +# +# Open handles to the caches +# + +_FIELD_CACHE = get_cache("field_cache") +_ATTR_CACHE = get_cache("attr_cache") + +# make sure caches are empty at startup +_FIELD_CACHE.clear() +_ATTR_CACHE.clear() + +# +# Cache key hash generation +# + if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4': # mysql <5.6.4 don't support millisecond precision _DATESTRING = "%Y:%m:%d-%H:%M:%S:000000" else: _DATESTRING = "%Y:%m:%d-%H:%M:%S:%f" - def hashid(obj, suffix=""): """ Returns a per-class unique that combines the object's @@ -49,36 +65,28 @@ def hashid(obj, suffix=""): return to_str(hid) -# signal handlers +# +# Cache callback handlers +# -from django.core.cache import get_cache -#from django.db.models.signals import pre_save, pre_delete, post_init +# Field cache - makes sure to cache all database fields when +# they are saved, no matter from where. -# field cache - -_FIELD_CACHE = get_cache("field_cache") -if not _FIELD_CACHE: - raise RuntimeError("settings.CACHE does not contain a 'field_cache' entry!") - -# callback before saving an object - -def field_pre_save(sender, **kwargs): +# callback to pre_save signal (connected in src.server.server) +def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwargs): """ Called at the beginning of the save operation. The save method must be called with the update_fields keyword in order to """ - global _FIELD_CACHE - - if kwargs.pop("raw", False): return - instance = kwargs.pop("instance") - fields = kwargs.pop("update_fields", None) - if fields: + if raw: + return + if update_fields: # this is a list of strings at this point. 
We want field objects - fields = (instance._meta.get_field_by_name(field)[0] for field in fields) + update_fields = (instance._meta.get_field_by_name(field)[0] for field in update_fields) else: # meta.fields are already field objects - fields = instance._meta.fields - for field in fields: + update_fields = instance._meta.fields + for field in update_fields: fieldname = field.name new_value = field.value_from_object(instance) handlername = "_%s_handler" % fieldname @@ -98,39 +106,34 @@ def field_pre_save(sender, **kwargs): # update cache _FIELD_CACHE.set(hid, new_value) -# goes into server: -#pre_save.connect(field_pre_save, dispatch_uid="fieldcache") +# Attr cache - caching the attribute objects related to a given object to +# avoid lookups more than necessary (this makes Attributes en par in speed +# to any property). + +# connected to post_init signal (connected in respective Attribute model) +def attr_post_init(sender, instance=None, **kwargs): + "Called when attribute is created or retrieved in connection with obj." 
+ #print "attr_post_init:", instance, instance.db_obj, instance.db_key + hid = hashid(_GA(instance, "db_obj"), "-%s" % _GA(instance, "db_key")) + if hid: + _ATTR_CACHE.set(hid, sender) +# connected to pre_delete signal (connected in respective Attribute model) +def attr_pre_delete(sender, instance=None, **kwargs): + "Called when attribute is deleted (del_attribute)" + #print "attr_pre_delete:", instance, instance.db_obj, instance.db_key + hid = hashid(_GA(instance, "db_obj"), "-%s" % _GA(instance, "db_key")) + if hid: + #print "attr_pre_delete:", _GA(instance, "db_key") + _ATTR_CACHE.delete(hid) +# access method +def get_attr_cache(obj, attrname): + "Called by get_attribute" + hid = hashid(obj, "-%s" % attrname) + _ATTR_CACHE.delete(hid) + return hid and _ATTR_CACHE.get(hid, None) or None + + -## attr cache - caching the attribute objects related to a given object to -## avoid lookups more than necessary (this makes attributes en par in speed -## to any property). The signal is triggered by the Attribute itself when it -## is created or deleted (it holds a reference to the object) -# -#_ATTR_CACHE = get_cache("attr_cache") -#if not _ATTR_CACHE: -# raise RuntimeError("settings.CACHE does not contain an 'attr_cache' entry!") -# -#def attr_post_init(sender, **kwargs): -# "Called when attribute is created or retrieved in connection with obj." 
-# hid = hashid(sender.db_obj, "-%s" % sender.db_key) -# _ATTR_CACHE.set(hid, sender) -#def attr_pre_delete(sender, **kwargs): -# "Called when attribute is deleted (del_attribute)" -# hid = hashid(sender.db_obj, "-%s" % sender.db_key) -# _ATTR_CACHE.delete(hid) -# -### goes into server: -#from src.objects.models import ObjAttribute -#from src.scripts.models import ScriptAttribute -#from src.players.models import PlayerAttribute -#post_init.connect(attr_post_init, sender=ObjAttribute, dispatch_uid="objattrcache") -#post_init.connect(attr_post_init, sender=ScriptAttribute, dispatch_uid="scriptattrcache") -#post_init.connect(attr_post_init, sender=PlayerAttribute, dispatch_uid="playerattrcache") -#pre_delete.connect(attr_pre_delete, sender=ObjAttribute, dispatch_uid="objattrcache") -#pre_delete.connect(attr_pre_delete, sender=ScriptAttribute, dispatch_uid="scriptattrcache") -#pre_delete.connect(attr_pre_delete, sender=PlayerAttribute, dispatch_uid="playerattrcache") -# -# ## property cache - this doubles as a central cache and as a way ## to trigger oob on such changes. # @@ -456,8 +459,8 @@ def del_prop_cache(obj, name): pass def flush_prop_cache(obj=None): pass -def get_attr_cache(obj, attrname): - return None +#def get_attr_cache(obj, attrname): +# return None def set_attr_cache(obj, attrname, attrobj): pass def del_attr_cache(obj, attrname): diff --git a/src/typeclasses/models.py b/src/typeclasses/models.py index 745bc98b01..79f1da70a8 100644 --- a/src/typeclasses/models.py +++ b/src/typeclasses/models.py @@ -103,7 +103,7 @@ class Attribute(SharedMemoryModel): # Attribute Database Model setup # # - # These databse fields are all set using their corresponding properties, + # These database fields are all set using their corresponding properties, # named same as the field, but withtout the db_* prefix. 
db_key = models.CharField('key', max_length=255, db_index=True) @@ -933,10 +933,11 @@ class TypedObject(SharedMemoryModel): if not get_attr_cache(self, attribute_name): attrib_obj = _GA(self, "_attribute_class").objects.filter( db_obj=self, db_key__iexact=attribute_name) - if attrib_obj: - set_attr_cache(self, attribute_name, attrib_obj[0]) - else: + if not attrib_obj: return False + #set_attr_cache(self, attribute_name, attrib_obj[0]) + #else: + # return False return True def set_attribute(self, attribute_name, new_value=None, lockstring=""): @@ -953,6 +954,7 @@ class TypedObject(SharedMemoryModel): types checked by secureattr are 'attrread','attredit','attrcreate'. """ attrib_obj = get_attr_cache(self, attribute_name) + print "set_attribute:", attribute_name, attrib_obj if not attrib_obj: attrclass = _GA(self, "_attribute_class") # check if attribute already exists. @@ -975,7 +977,7 @@ class TypedObject(SharedMemoryModel): flush_attr_cache(self) self.delete() raise IntegrityError("Attribute could not be saved - object %s was deleted from database." 
% self.key) - set_attr_cache(self, attribute_name, attrib_obj) + #set_attr_cache(self, attribute_name, attrib_obj) def get_attribute_obj(self, attribute_name, default=None): """ @@ -987,7 +989,7 @@ class TypedObject(SharedMemoryModel): db_obj=self, db_key__iexact=attribute_name) if not attrib_obj: return default - set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here + #set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here return attrib_obj[0] return attrib_obj @@ -1006,7 +1008,7 @@ class TypedObject(SharedMemoryModel): db_obj=self, db_key__iexact=attribute_name) if not attrib_obj: return default - set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here + #set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here return attrib_obj[0].value return attrib_obj.value @@ -1023,7 +1025,7 @@ class TypedObject(SharedMemoryModel): db_obj=self, db_key__iexact=attribute_name) if not attrib_obj: raise AttributeError - set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here + #set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here return attrib_obj[0].value return attrib_obj.value diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index b50598c5a0..6563b220af 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -91,7 +91,12 @@ class SharedMemoryModelBase(ModelBase): super(SharedMemoryModelBase, cls)._prepare() def __init__(cls, *args, **kwargs): - "Takes field names db_* and creates property wrappers named without the db_ prefix. So db_key -> key" + """ + Takes field names db_* and creates property wrappers named without the db_ prefix. So db_key -> key + This wrapper happens on the class level, so there is no overhead when creating objects. If a class + already has a wrapper of the given name, the automatic creation is skipped. 
Note: Remember to + document this auto-wrapping in the class header, this could seem very much like magic to the user otherwise. + """ super(SharedMemoryModelBase, cls).__init__(*args, **kwargs) def create_wrapper(cls, fieldname, wrappername): "Helper method to create property wrappers with unique names (must be in separate call)" @@ -114,7 +119,7 @@ class SharedMemoryModelBase(ModelBase): wrappername = fieldname == "id" and "dbref" or fieldname.replace("db_", "") if not hasattr(cls, wrappername): # make sure not to overload manually created wrappers on the model - print "wrapping %s -> %s" % (fieldname, wrappername) + #print "wrapping %s -> %s" % (fieldname, wrappername) create_wrapper(cls, fieldname, wrappername) class SharedMemoryModel(Model): From fb3259be8ce15eb1c80d8e022aa864e1a9825d0d Mon Sep 17 00:00:00 2001 From: Griatch Date: Wed, 29 May 2013 23:07:44 +0200 Subject: [PATCH 11/12] Activated propcache with new cache system. Still not functioning correctly, also the content cache needs to be handled. --- src/objects/models.py | 4 +- src/server/caches.py | 119 +++++++++++++++++++++++-------------- src/typeclasses/models.py | 28 ++++----- src/utils/idmapper/base.py | 2 +- 4 files changed, 87 insertions(+), 66 deletions(-) diff --git a/src/objects/models.py b/src/objects/models.py index 4406902428..f55dd54be6 100644 --- a/src/objects/models.py +++ b/src/objects/models.py @@ -65,7 +65,7 @@ class ObjAttribute(Attribute): verbose_name = "Object Attribute" verbose_name_plural = "Object Attributes" -# attach the cache handlers for attribute lookup +# attach the cache handlers post_init.connect(attr_post_init, sender=ObjAttribute, dispatch_uid="objattrcache") pre_delete.connect(attr_pre_delete, sender=ObjAttribute, dispatch_uid="objattrcache") @@ -248,7 +248,7 @@ class ObjectDB(TypedObject): "Deleter. 
Allows for del self.aliases" for alias in Alias.objects.filter(db_obj=self): alias.delete() - del_prop_cache(self, "_aliases") + #del_prop_cache(self, "_aliases") aliases = property(__aliases_get, __aliases_set, __aliases_del) # player property (wraps db_player) diff --git a/src/server/caches.py b/src/server/caches.py index 769cbc28b1..27d2b5574c 100644 --- a/src/server/caches.py +++ b/src/server/caches.py @@ -2,6 +2,7 @@ Central caching module. """ +from django.dispatch import Signal from django.core.cache import get_cache #from django.db.models.signals import pre_save, pre_delete, post_init from src.server.models import ServerConfig @@ -17,14 +18,16 @@ _DA = object.__delattr__ _FIELD_CACHE = get_cache("field_cache") _ATTR_CACHE = get_cache("attr_cache") +_PROP_CACHE = get_cache("prop_cache") # make sure caches are empty at startup _FIELD_CACHE.clear() _ATTR_CACHE.clear() +_PROP_CACHE.clear() -# +#------------------------------------------------------------ # Cache key hash generation -# +#------------------------------------------------------------ if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4': # mysql <5.6.4 don't support millisecond precision @@ -58,19 +61,23 @@ def hashid(obj, suffix=""): if not idnum or not date: # this will happen if setting properties on an object which is not yet saved return None - # build the hashid - hid = "%s-%s-#%s%s" % (_GA(obj, "__class__"), date, idnum, suffix) - hid = hid.replace(" ", "") + hid = "%s-%s-#%s" % (_GA(obj, "__class__"), date, idnum) + hid = hid.replace(" ", "") # we have to remove the class-name's space, for memcached's sake + # we cache the object part of the hashid to avoid too many object lookups _SA(obj, "_hashid", hid) + # build the complete hashid + hid = "%s%s" % (hid, suffix) return to_str(hid) -# +#------------------------------------------------------------ # Cache callback handlers -# +#------------------------------------------------------------ 
+#------------------------------------------------------------ # Field cache - makes sure to cache all database fields when # they are saved, no matter from where. +#------------------------------------------------------------ # callback to pre_save signal (connected in src.server.server) def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwargs): @@ -106,9 +113,18 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg # update cache _FIELD_CACHE.set(hid, new_value) +# access method + +def flush_field_cache(): + "Clear the field cache" + _FIELD_CACHE.clear() + + +#------------------------------------------------------------ # Attr cache - caching the attribute objects related to a given object to # avoid lookups more than necessary (this makes Attributes en par in speed # to any property). +#------------------------------------------------------------ # connected to post_init signal (connected in respective Attribute model) def attr_post_init(sender, instance=None, **kwargs): @@ -117,6 +133,7 @@ def attr_post_init(sender, instance=None, **kwargs): hid = hashid(_GA(instance, "db_obj"), "-%s" % _GA(instance, "db_key")) if hid: _ATTR_CACHE.set(hid, sender) + # connected to pre_delete signal (connected in respective Attribute model) def attr_pre_delete(sender, instance=None, **kwargs): "Called when attribute is deleted (del_attribute)" @@ -125,42 +142,52 @@ def attr_pre_delete(sender, instance=None, **kwargs): if hid: #print "attr_pre_delete:", _GA(instance, "db_key") _ATTR_CACHE.delete(hid) -# access method + +# access methods + def get_attr_cache(obj, attrname): "Called by get_attribute" hid = hashid(obj, "-%s" % attrname) _ATTR_CACHE.delete(hid) return hid and _ATTR_CACHE.get(hid, None) or None +def set_attr_cache(attrobj): + "Set the attr cache manually; this can be used to update" + attr_post_init(None, instance=attrobj) +def flush_attr_cache(): + "Clear attribute cache" + _ATTR_CACHE.clear() -## property cache 
- this doubles as a central cache and as a way -## to trigger oob on such changes. -# -#from django.dispatch import Signal -#_PROP_CACHE = get_cache("prop_cache") -#if not _PROP_CACHE: -# raise RuntimeError("settings.CACHE does not contain a 'prop_cache' entry!") -# -#PROP_POST_UPDATE = Signal(providing_args=["propname", "propvalue"]) -# -#def prop_update(sender, **kwargs): -# "Called when a propery is updated. kwargs are propname and propvalue." -# propname, propvalue = kwargs.pop("propname", None), kwargs.pop("propvalue", None) -# if propname == None: return -# hid = hashid(sender, "-%s" % propname) -# _PROP_CACHE.set(hid, propvalue) -# -#PROP_POST_UPDATE.connect(prop_update, dispatch_uid="propcache") -# -# - - +#------------------------------------------------------------ +# Property cache - this is a generic cache for properties stored on models. +#------------------------------------------------------------ +# access methods +def get_prop_cache(obj, propname): + "retrieve data from cache" + hid = hashid(obj, "-%s" % propname) + if hid: + #print "get_prop_cache", hid, propname, _PROP_CACHE.get(hid, None) + return _PROP_CACHE.get(hid, None) +def set_prop_cache(obj, propname, propvalue): + "Set property cache" + hid = hashid(obj, "-%s" % propname) + if hid: + #print "set_prop_cache", propname, propvalue + _PROP_CACHE.set(hid, propvalue) +def del_prop_cache(obj, propname): + "Delete element from property cache" + hid = hashid(obj, "-%s" % propname) + if hid: + _PROP_CACHE.delete(hid) +def flush_prop_cache(): + "Clear property cache" + _PROP_CACHE.clear() #_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE @@ -448,23 +475,23 @@ def del_field_cache(obj, name): #hid = hashid(obj) #if _OOB_FIELD_UPDATE_HOOKS[hid].get(name): # _OOB_HANDLER.update(hid, name, None) -def flush_field_cache(obj=None): - pass +#def flush_field_cache(obj=None): +# pass # these should get oob handlers when oob is implemented. 
-def get_prop_cache(obj, name, default=None): - return None -def set_prop_cache(obj, name, val): - pass -def del_prop_cache(obj, name): - pass -def flush_prop_cache(obj=None): - pass +#def get_prop_cache(obj, name, default=None): +# return None +#def set_prop_cache(obj, name, val): +# pass +#def del_prop_cache(obj, name): +# pass +#def flush_prop_cache(obj=None): +# pass #def get_attr_cache(obj, attrname): # return None -def set_attr_cache(obj, attrname, attrobj): - pass -def del_attr_cache(obj, attrname): - pass -def flush_attr_cache(obj=None): - pass +#def set_attr_cache(obj, attrname, attrobj): +# pass +#def del_attr_cache(obj, attrname): +# pass +#def flush_attr_cache(obj=None): +# pass diff --git a/src/typeclasses/models.py b/src/typeclasses/models.py index 79f1da70a8..24696c7881 100644 --- a/src/typeclasses/models.py +++ b/src/typeclasses/models.py @@ -41,7 +41,7 @@ from django.contrib.contenttypes.models import ContentType from django.db.models.fields import AutoField, FieldDoesNotExist from src.utils.idmapper.models import SharedMemoryModel from src.server.caches import get_field_cache, set_field_cache, del_field_cache -from src.server.caches import get_attr_cache, set_attr_cache, del_attr_cache +from src.server.caches import get_attr_cache, set_attr_cache from src.server.caches import get_prop_cache, set_prop_cache, del_prop_cache, flush_attr_cache #from src.server.caches import call_ndb_hooks from src.server.models import ServerConfig @@ -60,8 +60,6 @@ _CTYPEGET = ContentType.objects.get _GA = object.__getattribute__ _SA = object.__setattr__ _DA = object.__delattr__ -#_PLOADS = pickle.loads -#_PDUMPS = pickle.dumps #------------------------------------------------------------ # @@ -933,11 +931,10 @@ class TypedObject(SharedMemoryModel): if not get_attr_cache(self, attribute_name): attrib_obj = _GA(self, "_attribute_class").objects.filter( db_obj=self, db_key__iexact=attribute_name) - if not attrib_obj: + if attrib_obj: + set_attr_cache(attrib_obj[0]) + 
else: return False - #set_attr_cache(self, attribute_name, attrib_obj[0]) - #else: - # return False return True def set_attribute(self, attribute_name, new_value=None, lockstring=""): @@ -954,7 +951,6 @@ class TypedObject(SharedMemoryModel): types checked by secureattr are 'attrread','attredit','attrcreate'. """ attrib_obj = get_attr_cache(self, attribute_name) - print "set_attribute:", attribute_name, attrib_obj if not attrib_obj: attrclass = _GA(self, "_attribute_class") # check if attribute already exists. @@ -963,8 +959,9 @@ class TypedObject(SharedMemoryModel): if attrib_obj: # use old attribute attrib_obj = attrib_obj[0] + set_attr_cache(attrib_obj) # renew cache else: - # no match; create new attribute + # no match; create new attribute (this will cache automatically) attrib_obj = attrclass(db_key=attribute_name, db_obj=self) if lockstring: attrib_obj.locks.add(lockstring) @@ -977,7 +974,6 @@ class TypedObject(SharedMemoryModel): flush_attr_cache(self) self.delete() raise IntegrityError("Attribute could not be saved - object %s was deleted from database." 
% self.key) - #set_attr_cache(self, attribute_name, attrib_obj) def get_attribute_obj(self, attribute_name, default=None): """ @@ -989,7 +985,7 @@ class TypedObject(SharedMemoryModel): db_obj=self, db_key__iexact=attribute_name) if not attrib_obj: return default - #set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here + set_attr_cache(attrib_obj[0]) #query is first evaluated here return attrib_obj[0] return attrib_obj @@ -1008,7 +1004,7 @@ class TypedObject(SharedMemoryModel): db_obj=self, db_key__iexact=attribute_name) if not attrib_obj: return default - #set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here + set_attr_cache(attrib_obj[0]) #query is first evaluated here return attrib_obj[0].value return attrib_obj.value @@ -1025,7 +1021,7 @@ class TypedObject(SharedMemoryModel): db_obj=self, db_key__iexact=attribute_name) if not attrib_obj: raise AttributeError - #set_attr_cache(self, attribute_name, attrib_obj[0]) #query is first evaluated here + set_attr_cache(attrib_obj[0]) #query is first evaluated here return attrib_obj[0].value return attrib_obj.value @@ -1037,8 +1033,7 @@ class TypedObject(SharedMemoryModel): """ attr_obj = get_attr_cache(self, attribute_name) if attr_obj: - del_attr_cache(self, attribute_name) - attr_obj.delete() + attr_obj.delete() # this will clear attr cache automatically else: try: _GA(self, "_attribute_class").objects.filter( @@ -1055,8 +1050,7 @@ class TypedObject(SharedMemoryModel): """ attr_obj = get_attr_cache(self, attribute_name) if attr_obj: - del_attr_cache(self, attribute_name) - attr_obj.delete() + attr_obj.delete() # this will clear attr cache automatically else: try: _GA(self, "_attribute_class").objects.filter( diff --git a/src/utils/idmapper/base.py b/src/utils/idmapper/base.py index 6563b220af..30b22d1cd1 100755 --- a/src/utils/idmapper/base.py +++ b/src/utils/idmapper/base.py @@ -116,7 +116,7 @@ class SharedMemoryModelBase(ModelBase): # dynamically create the 
properties for field in cls._meta.fields: fieldname = field.name - wrappername = fieldname == "id" and "dbref" or fieldname.replace("db_", "") + wrappername = fieldname == "id" and "dbid" or fieldname.replace("db_", "") if not hasattr(cls, wrappername): # make sure not to overload manually created wrappers on the model #print "wrapping %s -> %s" % (fieldname, wrappername) From 41235c25a2c20109e42530ae80e115a8fc12100e Mon Sep 17 00:00:00 2001 From: Griatch Date: Thu, 30 May 2013 00:49:47 +0200 Subject: [PATCH 12/12] Changed propcache back to a simple dict. Working on content cache, not working yet. --- src/objects/models.py | 131 ++++++++++++++++++++++++++------------ src/server/caches.py | 25 +++++--- src/typeclasses/models.py | 18 +++--- 3 files changed, 114 insertions(+), 60 deletions(-) diff --git a/src/objects/models.py b/src/objects/models.py index f55dd54be6..8237bd0da2 100644 --- a/src/objects/models.py +++ b/src/objects/models.py @@ -306,27 +306,16 @@ class ObjectDB(TypedObject): del_field_cache(self, "sessid") sessid = property(__sessid_get, __sessid_set, __sessid_del) - # location property (wraps db_location) - #@property - def __location_get(self): - "Getter. Allows for value = self.location." - loc = get_field_cache(self, "location") - if loc: - return _GA(loc, "typeclass") - return None - #@location.setter - def __location_set(self, location): - "Setter. Allows for self.location = location" + def _db_location_handler(self, new_value, old_value=None): + "This handles changes to the db_location field." 
+ print "db_location_handler:", new_value, old_value try: - old_loc = _GA(self, "location") - if ObjectDB.objects.dbref(location): - # dbref search - loc = ObjectDB.objects.dbref_search(location) - loc = loc and _GA(loc, "dbobj") - elif location and type(location) != ObjectDB: - loc = _GA(location, "dbobj") - else: - loc = location + old_loc = old_value + # new_value can be dbref, typeclass or dbmodel + if ObjectDB.objects.dbref(new_value, reqhash=False): + loc = ObjectDB.objects.dbref_search(new_value) + # this should not fail if new_value is valid. + loc = _GA(loc, "dbobj") # recursive location check def is_loc_loop(loc, depth=0): @@ -340,32 +329,85 @@ class ObjectDB(TypedObject): except RuntimeWarning: pass # set the location - set_field_cache(self, "location", loc) + _SA(self, "db_location", loc) # update the contents of each location if old_loc: - _GA(_GA(old_loc, "dbobj"), "contents_update")() + _GA(_GA(old_loc, "dbobj"), "contents_update")(self, remove=True) if loc: - _GA(loc, "contents_update")() + _GA(loc, "contents_update")(self) except RuntimeError: string = "Cannot set location, " - string += "%s.location = %s would create a location-loop." % (self.key, loc) + string += "%s.location = %s would create a location-loop." % (self.key, new_value) _GA(self, "msg")(_(string)) logger.log_trace(string) raise RuntimeError(string) except Exception, e: string = "Cannot set location (%s): " % str(e) - string += "%s is not a valid location." % location + string += "%s is not a valid location." % new_value _GA(self, "msg")(_(string)) logger.log_trace(string) raise Exception(string) - #@location.deleter - def __location_del(self): - "Deleter. 
Allows for del self.location" - _GA(self, "location").contents_update() - _SA(self, "db_location", None) - _GA(self, "save")() - del_field_cache(self, "location") - location = property(__location_get, __location_set, __location_del) + + ## location property (wraps db_location) + ##@property + #def __location_get(self): + # "Getter. Allows for value = self.location." + # loc = get_field_cache(self, "location") + # if loc: + # return _GA(loc, "typeclass") + # return None + ##@location.setter + #def __location_set(self, location): + # "Setter. Allows for self.location = location" + # try: + # old_loc = _GA(self, "location") + # if ObjectDB.objects.dbref(location): + # # dbref search + # loc = ObjectDB.objects.dbref_search(location) + # loc = loc and _GA(loc, "dbobj") + # elif location and type(location) != ObjectDB: + # loc = _GA(location, "dbobj") + # else: + # loc = location + + # # recursive location check + # def is_loc_loop(loc, depth=0): + # "Recursively traverse the target location to make sure we are not in it." + # if depth > 10: return + # elif loc == self: raise RuntimeError + # elif loc == None: raise RuntimeWarning # just to quickly get out + # return is_loc_loop(_GA(loc, "db_location"), depth+1) + # # check so we don't create a location loop - if so, RuntimeError will be raised. + # try: is_loc_loop(loc) + # except RuntimeWarning: pass + + # # set the location + # set_field_cache(self, "location", loc) + # # update the contents of each location + # if old_loc: + # _GA(_GA(old_loc, "dbobj"), "contents_update")() + # if loc: + # _GA(loc, "contents_update")() + # except RuntimeError: + # string = "Cannot set location, " + # string += "%s.location = %s would create a location-loop." % (self.key, loc) + # _GA(self, "msg")(_(string)) + # logger.log_trace(string) + # raise RuntimeError(string) + # except Exception, e: + # string = "Cannot set location (%s): " % str(e) + # string += "%s is not a valid location." 
% location + # _GA(self, "msg")(_(string)) + # logger.log_trace(string) + # raise Exception(string) + ##@location.deleter + #def __location_del(self): + # "Deleter. Allows for del self.location" + # _GA(self, "location").contents_update() + # _SA(self, "db_location", None) + # _GA(self, "save")() + # del_field_cache(self, "location") + #location = property(__location_get, __location_set, __location_del) # home property (wraps db_home) #@property @@ -522,19 +564,26 @@ class ObjectDB(TypedObject): exclude = make_iter(exclude) if cont == None: cont = _GA(self, "contents_update")() - return [obj for obj in cont if obj not in exclude] + return [obj for obj in cont.values() if obj not in exclude] contents = property(contents_get) - def contents_update(self): + def contents_update(self, obj=None, remove=False): """ - Updates the contents property of the object with a new - object Called by - self.location_set. + Updates the contents property of the object - obj - - remove (true/false) - remove obj from content list + add - object to add to content list + remove object to remove from content list """ - cont = ObjectDB.objects.get_contents(self) + cont = get_prop_cache(self, "_contents") + if not cont: + cont = {} + if obj: + if remove: + cont.pop(self.dbid, None) + else: + cont[self.dbid] = obj + else: + cont = dict((o.dbid, o) for o in ObjectDB.objects.get_contents(self)) set_prop_cache(self, "_contents", cont) return cont diff --git a/src/server/caches.py b/src/server/caches.py index 27d2b5574c..9cbeb5bf4e 100644 --- a/src/server/caches.py +++ b/src/server/caches.py @@ -2,6 +2,8 @@ Central caching module. 
""" + +from collections import defaultdict from django.dispatch import Signal from django.core.cache import get_cache #from django.db.models.signals import pre_save, pre_delete, post_init @@ -18,12 +20,13 @@ _DA = object.__delattr__ _FIELD_CACHE = get_cache("field_cache") _ATTR_CACHE = get_cache("attr_cache") -_PROP_CACHE = get_cache("prop_cache") +#_PROP_CACHE = get_cache("prop_cache") +_PROP_CACHE = defaultdict(dict) # make sure caches are empty at startup _FIELD_CACHE.clear() _ATTR_CACHE.clear() -_PROP_CACHE.clear() +#_PROP_CACHE.clear() #------------------------------------------------------------ # Cache key hash generation @@ -106,7 +109,7 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg old_value = _FIELD_CACHE.get(hid) if hid else None # the handler may modify the stored value in various ways # don't catch exceptions, the handler must work! - new_value = handler(instance, new_value, oldval=old_value) + new_value = handler(new_value, old_value=old_value) # we re-assign this to the field, save() will pick it up from there _SA(instance, fieldname, new_value) if hid: @@ -170,24 +173,28 @@ def get_prop_cache(obj, propname): hid = hashid(obj, "-%s" % propname) if hid: #print "get_prop_cache", hid, propname, _PROP_CACHE.get(hid, None) - return _PROP_CACHE.get(hid, None) + return _PROP_CACHE[hid].get(propname, None) def set_prop_cache(obj, propname, propvalue): "Set property cache" hid = hashid(obj, "-%s" % propname) if hid: #print "set_prop_cache", propname, propvalue - _PROP_CACHE.set(hid, propvalue) + _PROP_CACHE[hid][propname] = propvalue + #_PROP_CACHE.set(hid, propvalue) def del_prop_cache(obj, propname): "Delete element from property cache" hid = hashid(obj, "-%s" % propname) - if hid: - _PROP_CACHE.delete(hid) + if hid and propname in _PROP_CACHE[hid]: + del _PROP_CACHE[hid][propname] + #_PROP_CACHE.delete(hid) def flush_prop_cache(): "Clear property cache" - _PROP_CACHE.clear() + global _PROP_CACHE + _PROP_CACHE = 
defaultdict(dict) + #_PROP_CACHE.clear() #_ENABLE_LOCAL_CACHES = settings.GAME_CACHE_TYPE @@ -491,7 +498,7 @@ def del_field_cache(obj, name): #def set_attr_cache(obj, attrname, attrobj): # pass #def del_attr_cache(obj, attrname): -# pass +# pass #def flush_attr_cache(obj=None): # pass diff --git a/src/typeclasses/models.py b/src/typeclasses/models.py index 24696c7881..a39860b898 100644 --- a/src/typeclasses/models.py +++ b/src/typeclasses/models.py @@ -474,34 +474,32 @@ class TypedObject(SharedMemoryModel): #@property def __name_get(self): "Getter. Allows for value = self.name" - return get_field_cache(self, "key") - #@name.setter + return self.key + #@name.setter def __name_set(self, value): "Setter. Allows for self.name = value" - set_field_cache(self, "key", value) + self.key = value #@name.deleter def __name_del(self): "Deleter. Allows for del self.name" raise Exception("Cannot delete name!") name = property(__name_get, __name_set, __name_del) - # typeclass_path property + # typeclass_path property - we don't cache this. #@property def __typeclass_path_get(self): "Getter. Allows for value = self.typeclass_path" - return get_field_cache(self, "typeclass_path") + return _GA(self, "db_typeclass_path")#get_field_cache(self, "typeclass_path") #@typeclass_path.setter def __typeclass_path_set(self, value): "Setter. Allows for self.typeclass_path = value" - set_field_cache(self, "typeclass_path", value) - _SA(self, "_cached_typeclass", None) + _SA(self, "db_typeclass_path", value) + _GA(self, "save")(update_fields=["db_typeclass_path"]) #@typeclass_path.deleter def __typeclass_path_del(self): "Deleter. Allows for del self.typeclass_path" self.db_typeclass_path = "" - self.save() - del_field_cache(self, "typeclass_path") - _SA(self, "_cached_typeclass", None) + _GA(self, "save")(update_fields=["db_typeclass_path"]) typeclass_path = property(__typeclass_path_get, __typeclass_path_set, __typeclass_path_del) # date_created property