Initial support for pypy + evennia

This commit is contained in:
Stephen Hansen 2013-01-31 14:34:52 -08:00
parent 41859d9e46
commit 5cfff771f2
5 changed files with 59 additions and 37 deletions

View file

@@ -283,7 +283,7 @@ def run_menu():
This launches an interactive menu.
"""
cmdstr = ["python", "runner.py"]
cmdstr = [sys.executable, "runner.py"]
while True:
# menu loop
@@ -350,7 +350,7 @@ def handle_args(options, mode, service):
"""
inter = options.interactive
cmdstr = ["python", "runner.py"]
cmdstr = [sys.executable, "runner.py"]
errmsg = "The %s does not seem to be running."
if mode == 'start':

View file

@@ -16,10 +16,16 @@ matter the value of this file.
"""
import os
import sys
import time
from optparse import OptionParser
from subprocess import Popen
import Queue, thread
# Detect whether we are running under PyPy.  The `__pypy__` builtin module
# exists only on the PyPy interpreter, so a successful import means PyPy.
# Bind an explicit boolean rather than the module object itself: the flag
# is only ever used in truth tests (`if is_pypy:`), and a bool documents
# that intent instead of leaking an interpreter-internal module around.
try:
    import __pypy__  # noqa: F401 -- presence check only
    is_pypy = True
except ImportError:
    is_pypy = False
#
# System Configuration
#
@@ -163,6 +169,10 @@ def start_services(server_argv, portal_argv):
return
if portal_argv:
if is_pypy:
# This is a hack; without it, the *server* stalls out and never finishes loading under PyPy.
time.sleep(1)
try:
if get_restart_mode(PORTAL_RESTART) == "True":
# start portal as interactive, reloadable thread

View file

@@ -17,7 +17,7 @@ from src.server.sessionhandler import SESSIONS
from src.scripts.models import ScriptDB
from src.objects.models import ObjectDB
from src.players.models import PlayerDB
from src.utils import logger, utils, gametime, create
from src.utils import logger, utils, gametime, create, is_pypy
from src.commands.default.muxcommand import MuxCommand
# delayed imports
@@ -692,24 +692,28 @@ class CmdServerLoad(MuxCommand):
for row in ftable:
string += "\n " + "{w%s{n" % row[0] + "".join(row[1:])
# object cache size
cachedict = _idmapper.cache_size()
totcache = cachedict["_total"]
string += "\n{w Database entity (idmapper) cache usage:{n %5.2f MB (%i items)" % (totcache[1], totcache[0])
sorted_cache = sorted([(key, tup[0], tup[1]) for key, tup in cachedict.items() if key !="_total" and tup[0] > 0],
key=lambda tup: tup[2], reverse=True)
table = [[tup[0] for tup in sorted_cache],
["%5.2f MB" % tup[2] for tup in sorted_cache],
["%i item(s)" % tup[1] for tup in sorted_cache]]
ftable = utils.format_table(table, 5)
for row in ftable:
string += "\n " + row[0] + row[1] + row[2]
# get sizes of other caches
attr_cache_info, field_cache_info, prop_cache_info = get_cache_sizes()
#size = sum([sum([getsizeof(obj) for obj in dic.values()]) for dic in _attribute_cache.values()])/1024.0
#count = sum([len(dic) for dic in _attribute_cache.values()])
string += "\n{w On-entity Attribute cache usage:{n %5.2f MB (%i attrs)" % (attr_cache_info[1], attr_cache_info[0])
string += "\n{w On-entity Field cache usage:{n %5.2f MB (%i fields)" % (field_cache_info[1], field_cache_info[0])
string += "\n{w On-entity Property cache usage:{n %5.2f MB (%i props)" % (prop_cache_info[1], prop_cache_info[0])
if not is_pypy:
# Cache size measurements are not available on PyPy because it lacks sys.getsizeof
# object cache size
cachedict = _idmapper.cache_size()
totcache = cachedict["_total"]
string += "\n{w Database entity (idmapper) cache usage:{n %5.2f MB (%i items)" % (totcache[1], totcache[0])
sorted_cache = sorted([(key, tup[0], tup[1]) for key, tup in cachedict.items() if key !="_total" and tup[0] > 0],
key=lambda tup: tup[2], reverse=True)
table = [[tup[0] for tup in sorted_cache],
["%5.2f MB" % tup[2] for tup in sorted_cache],
["%i item(s)" % tup[1] for tup in sorted_cache]]
ftable = utils.format_table(table, 5)
for row in ftable:
string += "\n " + row[0] + row[1] + row[2]
# get sizes of other caches
attr_cache_info, field_cache_info, prop_cache_info = get_cache_sizes()
#size = sum([sum([getsizeof(obj) for obj in dic.values()]) for dic in _attribute_cache.values()])/1024.0
#count = sum([len(dic) for dic in _attribute_cache.values()])
string += "\n{w On-entity Attribute cache usage:{n %5.2f MB (%i attrs)" % (attr_cache_info[1], attr_cache_info[0])
string += "\n{w On-entity Field cache usage:{n %5.2f MB (%i fields)" % (field_cache_info[1], field_cache_info[0])
string += "\n{w On-entity Property cache usage:{n %5.2f MB (%i props)" % (prop_cache_info[1], prop_cache_info[0])
caller.msg(string)

View file

@@ -16,10 +16,12 @@ from src.server.sessionhandler import SESSIONS
from src.typeclasses.typeclass import TypeClass
from src.scripts.models import ScriptDB
from src.comms import channelhandler
from src.utils import logger
from src.utils import logger, is_pypy
from django.utils.translation import ugettext as _
__all__ = ("Script", "DoNothing", "CheckSessions", "ValidateScripts", "ValidateChannelHandler", "ClearAttributeCache")
__all__ = ["Script", "DoNothing", "CheckSessions", "ValidateScripts", "ValidateChannelHandler"]
if not is_pypy:
__all__.append("ClearAttributeCache")
_ATTRIBUTE_CACHE_MAXSIZE = settings.ATTRIBUTE_CACHE_MAXSIZE # attr-cache size in MB.
@@ -446,16 +448,18 @@ class ValidateChannelHandler(Script):
#print "ValidateChannelHandler run."
channelhandler.CHANNELHANDLER.update()
class ClearAttributeCache(Script):
    """Periodically empty the on-entity attribute cache when it grows too big."""

    def at_script_creation(self):
        """Configure key, description and repeat schedule for the script."""
        self.key = "sys_cache_clear"
        self.desc = _("Clears the Attribute Cache")
        self.interval = 2 * 3600  # repeat every two hours
        self.persistent = True

    def at_repeat(self):
        """Flush the attribute cache once it exceeds the configured max size."""
        # get_cache_sizes() returns (attr, field, prop) sizes; only the
        # attribute-cache size matters here.
        attr_size = caches.get_cache_sizes()[0]
        if attr_size > _ATTRIBUTE_CACHE_MAXSIZE:
            caches.flush_attr_cache()
# PyPy lacks sys.getsizeof, so cache-size bookkeeping cannot work there;
# only define the cache-trimming script on interpreters that support it.
if not is_pypy:

    class ClearAttributeCache(Script):
        """Periodically empty the on-entity attribute cache when it grows too big."""

        def at_script_creation(self):
            """Configure key, description and repeat schedule for the script."""
            self.key = "sys_cache_clear"
            self.desc = _("Clears the Attribute Cache")
            self.interval = 2 * 3600  # repeat every two hours
            self.persistent = True

        def at_repeat(self):
            """Flush the attribute cache once it exceeds the configured max size."""
            # get_cache_sizes() returns (attr, field, prop) sizes; only the
            # attribute-cache size matters here.
            attr_size = caches.get_cache_sizes()[0]
            if attr_size > _ATTRIBUTE_CACHE_MAXSIZE:
                caches.flush_attr_cache()

View file

@@ -0,0 +1,4 @@
# True when running under PyPy, False elsewhere.  The `__pypy__` builtin
# module exists only on the PyPy interpreter, so its importability is a
# reliable presence check.  Export an explicit boolean rather than the
# module object: every consumer only truth-tests the flag, and a bool
# avoids leaking an interpreter-internal module as a public name.
try:
    import __pypy__  # noqa: F401 -- presence check only
    is_pypy = True
except ImportError:
    is_pypy = False