PEP8 cleanup of the entire codebase. Unchanged are many cases of too-long lines, partly because of the rewrite they would require but also because splitting many lines up would make the code harder to read. Also the third-party libraries (idmapper, prettytable, etc.) were not cleaned.

This commit is contained in:
Griatch 2013-11-14 19:31:17 +01:00
parent 30b7d2a405
commit 1ae17bcbe4
154 changed files with 5613 additions and 4054 deletions

View file

@ -4,10 +4,10 @@ Central caching module.
"""
from sys import getsizeof
import os, threading
import os
import threading
from collections import defaultdict
from django.core.cache import get_cache
from src.server.models import ServerConfig
from src.utils.utils import uses_database, to_str, get_evennia_pids
@ -35,6 +35,7 @@ if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6
else:
_DATESTRING = "%Y:%m:%d-%H:%M:%S:%f"
def hashid(obj, suffix=""):
"""
Returns a per-class unique hash that combines the object's
@ -59,11 +60,15 @@ def hashid(obj, suffix=""):
# rely on memory adressing in this case.
date, idnum = "InMemory", id(obj)
if not idnum or not date:
# this will happen if setting properties on an object which is not yet saved
# this will happen if setting properties on an object which
# is not yet saved
return None
# we have to remove the class-name's space, for eventual use
# of memcached
hid = "%s-%s-#%s" % (_GA(obj, "__class__"), date, idnum)
hid = hid.replace(" ", "") # we have to remove the class-name's space, for memcached's sake
# we cache the object part of the hashid to avoid too many object lookups
hid = hid.replace(" ", "")
# we cache the object part of the hashid to avoid too many
# object lookups
_SA(obj, "_hashid", hid)
# build the complete hashid
hid = "%s%s" % (hid, suffix)
@ -84,8 +89,9 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg
"""
Called at the beginning of the field save operation. The save method
must be called with the update_fields keyword in order to be most efficient.
This method should NOT save; rather it is the save() that triggers this function.
Its main purpose is to allow to plug-in a save handler and oob handlers.
This method should NOT save; rather it is the save() that triggers this
function. Its main purpose is to allow to plug-in a save handler and oob
handlers.
"""
if raw:
return
@ -102,12 +108,14 @@ def field_pre_save(sender, instance=None, update_fields=None, raw=False, **kwarg
if callable(handler):
handler()
def field_post_save(sender, instance=None, update_fields=None, raw=False, **kwargs):
"""
Called at the beginning of the field save operation. The save method
must be called with the update_fields keyword in order to be most efficient.
This method should NOT save; rather it is the save() that triggers this function.
Its main purpose is to allow to plug-in a save handler and oob handlers.
This method should NOT save; rather it is the save() that triggers this
function. Its main purpose is to allow to plug-in a save handler and oob
handlers.
"""
if raw:
return
@ -127,70 +135,6 @@ def field_post_save(sender, instance=None, update_fields=None, raw=False, **kwar
if trackerhandler:
trackerhandler.update(fieldname, _GA(instance, fieldname))
#------------------------------------------------------------
# Attr cache - caching the attribute objects related to a given object to
# avoid lookups more than necessary (this makes Attributes on par in speed
# to any property).
#------------------------------------------------------------
## connected to m2m_changed signal in respective model class
#def post_attr_update(sender, **kwargs):
# "Called when the many2many relation changes (NOT when updating the value of an Attribute!)"
# obj = kwargs['instance']
# model = kwargs['model']
# action = kwargs['action']
# if kwargs['reverse']:
# # the reverse relation changed (the Attribute itself was acted on)
# pass
# else:
# # forward relation changed (the Object holding the Attribute m2m field)
# if not kwargs["pk_set"]:
# return
# if action == "post_add":
# # cache all added objects
# for attr_id in kwargs["pk_set"]:
# attr_obj = model.objects.get(pk=attr_id)
# set_attr_cache(obj, _GA(attr_obj, "db_key"), attr_obj)
# elif action == "post_remove":
# # obj.db_attributes.remove(attr) was called
# for attr_id in kwargs["pk_set"]:
# attr_obj = model.objects.get(pk=attr_id)
# del_attr_cache(obj, _GA(attr_obj, "db_key"))
# attr_obj.delete()
# elif action == "post_clear":
# # obj.db_attributes.clear() was called
# clear_obj_attr_cache(obj)
#
#
## attr cache - this is only left as deprecated cache
#
#def get_attr_cache(obj, attrname):
# "Called by getting attribute"
# hid = hashid(obj, "-%s" % attrname)
# return _ATTR_CACHE.get(hid, None)
#
#def set_attr_cache(obj, attrname, attrobj):
# "Set the attr cache manually; this can be used to update"
# global _ATTR_CACHE
# hid = hashid(obj, "-%s" % attrname)
# _ATTR_CACHE[hid] = attrobj
#
#def del_attr_cache(obj, attrname):
# "Del attribute cache"
# global _ATTR_CACHE
# hid = hashid(obj, "-%s" % attrname)
# if hid in _ATTR_CACHE:
# del _ATTR_CACHE[hid]
#
#def flush_attr_cache():
# "Clear attribute cache"
# global _ATTR_CACHE
# _ATTR_CACHE = {}
#
#def clear_obj_attr_cache(obj):
# global _ATTR_CACHE
# hid = hashid(obj)
# _ATTR_CACHE = {key:value for key, value in _ATTR_CACHE if not key.startswith(hid)}
#------------------------------------------------------------
# Property cache - this is a generic cache for properties stored on models.
@ -203,12 +147,14 @@ def get_prop_cache(obj, propname):
hid = hashid(obj, "-%s" % propname)
return _PROP_CACHE[hid].get(propname, None) if hid else None
def set_prop_cache(obj, propname, propvalue):
    """
    Store propvalue in the property cache, keyed on obj's hashid
    combined with propname. Does nothing if no hashid can be built
    (e.g. for objects that are not yet saved).
    """
    cache_key = hashid(obj, "-%s" % propname)
    if not cache_key:
        return
    _PROP_CACHE[cache_key][propname] = propvalue
def del_prop_cache(obj, propname):
"Delete element from property cache"
hid = hashid(obj, "-%s" % propname)
@ -216,11 +162,12 @@ def del_prop_cache(obj, propname):
if propname in _PROP_CACHE[hid]:
del _PROP_CACHE[hid][propname]
def flush_prop_cache():
    """
    Empty the property cache entirely by rebinding the module-level
    _PROP_CACHE to a fresh defaultdict of dicts.
    """
    global _PROP_CACHE
    # rebind rather than .clear() so any stale references are dropped
    _PROP_CACHE = defaultdict(dict)
def get_cache_sizes():
"""
@ -229,8 +176,8 @@ def get_cache_sizes():
global _ATTR_CACHE, _PROP_CACHE
attr_n = len(_ATTR_CACHE)
attr_mb = sum(getsizeof(obj) for obj in _ATTR_CACHE) / 1024.0
field_n = 0 #sum(len(dic) for dic in _FIELD_CACHE.values())
field_mb = 0 # sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0
field_n = 0 # sum(len(dic) for dic in _FIELD_CACHE.values())
field_mb = 0 # sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _FIELD_CACHE.values()) / 1024.0
prop_n = sum(len(dic) for dic in _PROP_CACHE.values())
prop_mb = sum(sum([getsizeof(obj) for obj in dic.values()]) for dic in _PROP_CACHE.values()) / 1024.0
return (attr_n, attr_mb), (field_n, field_mb), (prop_n, prop_mb)