From 230d73cfa033ba9d1ce8c51467bef028641ea53b Mon Sep 17 00:00:00 2001 From: Griatch Date: Sat, 13 Apr 2013 23:50:33 +0200 Subject: [PATCH] Migrations work under sqlite3, both from latest dev, from new install and from trunk. There might be some consistency issues though (it seems the character list is not properly migrated), so more testing is required. --- ...02_auto__del_field_objattribute_db_mode.py | 86 ++- src/objects/migrations/0008_auto.py | 4 +- .../migrations/0009_converting_attributes.py | 297 ++++++++-- .../migrations/0010_converting_attributes.py | 33 +- .../migrations/0019_convert_attrdata.py | 514 +++++++++++++++++ ...auto__del_field_playerattribute_db_mode.py | 437 ++++++++++++++- .../migrations/0008_converting_attributes.py | 271 ++++++++- .../migrations/0009_converting_attributes.py | 103 ++-- .../0014_add_attr__playable_characters.py | 18 +- .../migrations/0018_convert_attrdata.py | 444 +++++++++++++++ ...auto__del_field_scriptattribute_db_mode.py | 95 +++- .../migrations/0005_converting_attributes.py | 267 ++++++++- .../migrations/0006_converting_attributes.py | 100 ++-- .../migrations/0011_convert_attrdata.py | 515 ++++++++++++++++++ src/typeclasses/models.py | 216 -------- src/utils/dbserialize.py | 4 +- 16 files changed, 2941 insertions(+), 463 deletions(-) create mode 100644 src/objects/migrations/0019_convert_attrdata.py create mode 100644 src/players/migrations/0018_convert_attrdata.py create mode 100644 src/scripts/migrations/0011_convert_attrdata.py diff --git a/src/objects/migrations/0002_auto__del_field_objattribute_db_mode.py b/src/objects/migrations/0002_auto__del_field_objattribute_db_mode.py index f1be4dc3d2..6c17ca99a6 100644 --- a/src/objects/migrations/0002_auto__del_field_objattribute_db_mode.py +++ b/src/objects/migrations/0002_auto__del_field_objattribute_db_mode.py @@ -4,36 +4,80 @@ from south.db import db from south.v2 import SchemaMigration from django.db import models +# overloading pickle to have it find the PackedDBobj 
in this module +import pickle + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +renametable = { + 'src.typeclasses.models': 'src.objects.migrations.0019_convert_attrdata', + 'PackedDBobject': 'PackedDBobject', +} + +def mapname(name): + if name in renametable: + return renametable[name] + return name + +def mapped_load_global(self): + module = mapname(self.readline()[:-1]) + name = mapname(self.readline()[:-1]) + klass = self.find_class(module, name) + self.append(klass) + +def loads(str): + file = StringIO(str) + unpickler = pickle.Unpickler(file) + unpickler.dispatch[pickle.GLOBAL] = mapped_load_global + return unpickler.load() + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + class Migration(SchemaMigration): def forwards(self, orm): - - # Deleting field 'ObjAttribute.db_mode' - from src.objects.models import ObjAttribute - from src.typeclasses.models import PackedDBobject - for attr in ObjAttribute.objects.all(): - # resave attributes - db_mode = attr.db_mode - if db_mode and db_mode != 'pickle': - # an object. We need to resave this. 
- if db_mode == 'object': - val = PackedDBobject(attr.db_value, "objectdb") - elif db_mode == 'player': - val = PackedDBobject(attr.db_value, "playerdb") - elif db_mode == 'script': - val = PackedDBobject(attr.db_value, "scriptdb") - elif db_mode == 'help': - val = PackedDBobject(attr.db_value, "helpentry") - else: - val = PackedDBobject(attr.db_value, db_mode) # channel, msg - attr.value = val + # Deleting field 'ObjAttribute.db_mode' + + if not db.dry_run: + for attr in orm["objects.ObjAttribute"].objects.all(): + # resave attributes + db_mode = attr.db_mode + if db_mode and db_mode != 'pickle': + # an object. We need to resave this. + if db_mode == 'object': + val = PackedDBobject(attr.db_value, "objectdb") + elif db_mode == 'player': + val = PackedDBobject(attr.db_value, "playerdb") + elif db_mode == 'script': + val = PackedDBobject(attr.db_value, "scriptdb") + elif db_mode == 'help': + val = PackedDBobject(attr.db_value, "helpentry") + else: + val = PackedDBobject(attr.db_value, db_mode) # channel, msg + attr.value = val db.delete_column('objects_objattribute', 'db_mode') def backwards(self, orm): - + # Adding field 'ObjAttribute.db_mode' db.add_column('objects_objattribute', 'db_mode', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True), keep_default=False) diff --git a/src/objects/migrations/0008_auto.py b/src/objects/migrations/0008_auto.py index 142d07fe10..0a6ab7995a 100644 --- a/src/objects/migrations/0008_auto.py +++ b/src/objects/migrations/0008_auto.py @@ -7,7 +7,7 @@ from django.db import models class Migration(SchemaMigration): def forwards(self, orm): - + # Adding index on 'ObjectDB', fields ['db_key'] db.create_index('objects_objectdb', ['db_key']) @@ -19,7 +19,7 @@ class Migration(SchemaMigration): def backwards(self, orm): - + # Removing index on 'ObjAttribute', fields ['db_key'] db.delete_index('objects_objattribute', ['db_key']) diff --git a/src/objects/migrations/0009_converting_attributes.py 
b/src/objects/migrations/0009_converting_attributes.py index 1782f775da..4368b592de 100644 --- a/src/objects/migrations/0009_converting_attributes.py +++ b/src/objects/migrations/0009_converting_attributes.py @@ -9,14 +9,228 @@ try: except ImportError: import pickle from src.utils.utils import to_str, to_unicode -from src.typeclasses.models import PackedDBobject,PackedDict,PackedList - from django.contrib.contenttypes.models import ContentType CTYPEGET = ContentType.objects.get GA = object.__getattribute__ SA = object.__setattr__ DA = object.__delattr__ +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. + + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. 
Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class PackedList(list): + """ + Attribute helper class. + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. 
once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." + super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. 
+ db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." 
+ super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." + super(PackedSet, self).update(*args, **kwargs) + self.save() def to_attr(data): """ Convert data to proper attr data format before saving @@ -32,13 +246,13 @@ def to_attr(data): (and any nested combination of them) this way, all other iterables are stored and returned as lists. - data storage format: + data storage format: (simple|dbobj|iter, ) - where + where simple - a single non-db object, like a string or number dbobj - a single dbobj iter - any iterable object - will be looped over recursively - to convert dbobj->id. + to convert dbobj->id. """ @@ -49,7 +263,7 @@ def to_attr(data): """ dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): db_model_name = item.db_model_name if db_model_name == "typeclass": @@ -75,8 +289,8 @@ def to_attr(data): if db_model_name == "typeclass": # typeclass cannot help us, we want the actual child object model name db_model_name = GA(data.dbobj, "db_model_name") - return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) - elif hasattr(data, "__iter__"): + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): return ("iter", iter_db2id(data)) else: return ("simple", data) @@ -84,21 +298,21 @@ def to_attr(data): def from_attr(attr, datatuple): """ Retrieve data from a previously stored attribute. This - is always a dict with keys type and data. + is always a dict with keys type and data. 
- datatuple comes from the database storage and has - the following format: + datatuple comes from the database storage and has + the following format: (simple|dbobj|iter, ) where simple - a single non-db object, like a string. is returned as-is. - dbobj - a single dbobj-id. This id is retrieved back from the database. + dbobj - a single dbobj-id. This id is retrieved back from the database. iter - an iterable. This is traversed iteratively, converting all found - dbobj-ids back to objects. Also, all lists and dictionaries are - returned as their PackedList/PackedDict counterparts in order to + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to allow in-place assignment such as obj.db.mylist[3] = val. Mylist - is then a PackedList that saves the data on the fly. + is then a PackedList that saves the data on the fly. """ - # nested functions + # nested functions def id2db(data): """ Convert db-stored dbref back to object @@ -111,39 +325,39 @@ def from_attr(attr, datatuple): try: return mclass.objects.get(id=data.id) except mclass.DoesNotExist: # could happen if object was deleted in the interim. - return None + return None def iter_id2db(item): """ Recursively looping through stored iterables, replacing ids with actual objects. We return PackedDict and PackedLists instead of normal lists; this is needed in order for the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is - stored in the database are however always normal python primitives. + stored in the database are however always normal python primitives. 
""" dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif dtype == PackedDBobject: return id2db(item) - elif dtype == tuple: + elif dtype == tuple: return tuple([iter_id2db(val) for val in item]) elif dtype in (dict, PackedDict): return PackedDict(attr, dict(zip([key for key in item.keys()], [iter_id2db(val) for val in item.values()]))) elif hasattr(item, '__iter__'): return PackedList(attr, list(iter_id2db(val) for val in item)) - else: - return item + else: + return item typ, data = datatuple - if typ == 'simple': + if typ == 'simple': # single non-db objects return data - elif typ == 'dbobj': - # a single stored dbobj + elif typ == 'dbobj': + # a single stored dbobj return id2db(data) - elif typ == 'iter': + elif typ == 'iter': # all types of iterables return iter_id2db(data) @@ -152,21 +366,22 @@ class Migration(DataMigration): def forwards(self, orm): "Write your forwards methods here." - for attr in orm.ObjAttribute.objects.all(): - # repack attr into new format, and reimport - try: - val = pickle.loads(to_str(attr.db_value)) - if hasattr(val, '__iter__'): - val = ("iter", val) - elif type(val) == PackedDBobject: - val = ("dbobj", val) - else: - val = ("simple", val) - attr.db_value = to_unicode(pickle.dumps(to_str(to_attr(from_attr(attr, val))))) - attr.save() - except TypeError, RuntimeError: - pass - + if not db.dry_run: + for attr in orm.ObjAttribute.objects.all(): + # repack attr into new format, and reimport + try: + val = pickle.loads(to_str(attr.db_value)) + if hasattr(val, '__iter__'): + val = ("iter", val) + elif type(val) == PackedDBobject: + val = ("dbobj", val) + else: + val = ("simple", val) + attr.db_value = to_unicode(pickle.dumps(to_str(to_attr(from_attr(attr, val))))) + attr.save() + except TypeError, RuntimeError: + pass + def backwards(self, orm): "Write your backwards methods here." 
raise RuntimeError diff --git a/src/objects/migrations/0010_converting_attributes.py b/src/objects/migrations/0010_converting_attributes.py index 2efda220dc..253c20e391 100644 --- a/src/objects/migrations/0010_converting_attributes.py +++ b/src/objects/migrations/0010_converting_attributes.py @@ -9,7 +9,7 @@ try: except ImportError: import pickle from src.utils.utils import to_str, to_unicode -from src.typeclasses.models import PackedDBobject +#from src.typeclasses.models import PackedDBobject from django.contrib.contenttypes.models import ContentType CTYPEGET = ContentType.objects.get @@ -17,6 +17,37 @@ GA = object.__getattribute__ SA = object.__setattr__ DA = object.__delattr__ +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + + class PackedDict(dict): """ Attribute helper class. 
diff --git a/src/objects/migrations/0019_convert_attrdata.py b/src/objects/migrations/0019_convert_attrdata.py new file mode 100644 index 0000000000..bffb735f4a --- /dev/null +++ b/src/objects/migrations/0019_convert_attrdata.py @@ -0,0 +1,514 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models +from django.contrib.contenttypes.models import ContentType + +from src.utils.utils import to_str +from src.utils.dbserialize import to_pickle +try: + import cPickle as pickle +except ImportError: + import pickle +CTYPEGET = ContentType.objects.get +GA = object.__getattribute__ +SA = object.__setattr__ +DA = object.__delattr__ + + +# overloading pickle to have it find the PackedDBobj in this module +import pickle + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +renametable = { + 'src.typeclasses.models': 'src.objects.migrations.0019_convert_attrdata', + 'PackedDBobject': 'PackedDBobject', +} + +def mapname(name): + if name in renametable: + return renametable[name] + return name + +def mapped_load_global(self): + module = mapname(self.readline()[:-1]) + name = mapname(self.readline()[:-1]) + klass = self.find_class(module, name) + self.append(klass) + +def loads(str): + file = StringIO(str) + unpickler = pickle.Unpickler(file) + unpickler.dispatch[pickle.GLOBAL] = mapped_load_global + return unpickler.load() + + + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. 
+ A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. + + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class 
PackedList(list): + """ + Attribute helper class. + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." 
+ super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. + db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." 
+ if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." + super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." + super(PackedSet, self).update(*args, **kwargs) + self.save() + +def to_attr(data): + """ + Convert data to proper attr data format before saving + + We have to make sure to not store database objects raw, since + this will crash the system. Instead we must store their IDs + and make sure to convert back when the attribute is read back + later. 
+ + Due to this it's criticial that we check all iterables + recursively, converting all found database objects to a form + the database can handle. We handle lists, tuples and dicts + (and any nested combination of them) this way, all other + iterables are stored and returned as lists. + + data storage format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string or number + dbobj - a single dbobj + iter - any iterable object - will be looped over recursively + to convert dbobj->id. + + """ + + def iter_db2id(item): + """ + recursively looping through stored iterables, replacing objects with ids. + (Python only builds nested functions once, so there is no overhead for nesting) + """ + dtype = type(item) + if dtype in (basestring, int, float): # check the most common types first, for speed + return item + elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): + db_model_name = item.db_model_name + if db_model_name == "typeclass": + db_model_name = GA(item.dbobj, "db_model_name") + return PackedDBobject(item.id, db_model_name, item.db_key) + elif dtype == tuple: + return tuple(iter_db2id(val) for val in item) + elif dtype in (dict, PackedDict): + return dict((key, iter_db2id(val)) for key, val in item.items()) + elif hasattr(item, '__iter__'): + return list(iter_db2id(val) for val in item) + else: + return item + + dtype = type(data) + + if dtype in (basestring, int, float): + return ("simple",data) + elif hasattr(data, "id") and hasattr(data, "db_model_name") and hasattr(data, 'db_key'): + # all django models (objectdb,scriptdb,playerdb,channel,msg,typeclass) + # have the protected property db_model_name hardcoded on themselves for speed. 
+ db_model_name = data.db_model_name + if db_model_name == "typeclass": + # typeclass cannot help us, we want the actual child object model name + db_model_name = GA(data.dbobj, "db_model_name") + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): + return ("iter", iter_db2id(data)) + else: + return ("simple", data) + + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." + + # modified for migration - converts to plain python properties + def from_attr(datatuple): + """ + Retrieve data from a previously stored attribute. This + is always a dict with keys type and data. + + datatuple comes from the database storage and has + the following format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string. is returned as-is. + dbobj - a single dbobj-id. This id is retrieved back from the database. + iter - an iterable. This is traversed iteratively, converting all found + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to + allow in-place assignment such as obj.db.mylist[3] = val. Mylist + is then a PackedList that saves the data on the fly. + """ + # nested functions + def id2db(data): + """ + Convert db-stored dbref back to object + """ + #app_map = {'objectdb':".objects.objectdb", "scriptdb":"scripts.scriptdb", "playerdb":"players.playerdb"} + #print "data.db_model:", data.db_model + #mclass = orm[data.db_model].model_class() + mclass = CTYPEGET(model=data.db_model).model_class() + try: + return mclass.objects.get(id=data.id) + + except AttributeError: + try: + return mclass.objects.get(id=data.id) + except mclass.DoesNotExist: # could happen if object was deleted in the interim. 
+ return None + + def iter_id2db(item): + """ + Recursively looping through stored iterables, replacing ids with actual objects. + We return PackedDict and PackedLists instead of normal lists; this is needed in order for + the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is + stored in the database are however always normal python primitives. + """ + dtype = type(item) + if dtype in (basestring, int, float, long, bool): # check the most common types first, for speed + return item + elif dtype == PackedDBobject or hasattr(item, '__class__') and item.__class__.__name__ == "PackedDBobject": + return id2db(item) + elif dtype == tuple: + return tuple([iter_id2db(val) for val in item]) + elif dtype in (dict, PackedDict): + return dict(zip([key for key in item.keys()], + [iter_id2db(val) for val in item.values()])) + elif hasattr(item, '__iter__'): + return list(iter_id2db(val) for val in item) + else: + return item + + typ, data = datatuple + + if typ == 'simple': + # single non-db objects + return data + elif typ == 'dbobj': + # a single stored dbobj + return id2db(data) + elif typ == 'iter': + # all types of iterables + return iter_id2db(data) + + if not db.dry_run: + for attr in orm['objects.ObjAttribute'].objects.all(): + # repack attr into new format and reimport + datatuple = loads(to_str(attr.db_value)) + python_data = from_attr(datatuple) + new_data = to_pickle(python_data) + attr.db_value2 = new_data # new pickleObjectField + attr.save() + + def backwards(self, orm): + "Write your backwards methods here." 
+ raise RuntimeError("This migration cannot be reversed.") + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 
'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'objects.alias': { + 'Meta': {'object_name': 'Alias'}, + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'objects.objattribute': { + 'Meta': {'object_name': 'ObjAttribute'}, + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']"}), + 'db_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'db_value2': ('src.utils.picklefield.PickledObjectField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'objects.objectdb': { + 'Meta': {'object_name': 'ObjectDB'}, + 'db_cmdset_storage': 
('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}), + 'db_sessid': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'objects.objectnick': { + 'Meta': {'unique_together': "(('db_nick', 'db_type', 'db_obj'),)", 'object_name': 'ObjectNick'}, + 'db_nick': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']"}), + 'db_real': ('django.db.models.fields.TextField', [], {}), + 'db_type': ('django.db.models.fields.CharField', [], {'default': "'inputline'", 'max_length': '16', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 
'True'}) + }, + 'players.playerdb': { + 'Meta': {'object_name': 'PlayerDB'}, + 'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_is_connected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}) + } + } + + complete_apps = ['objects'] + symmetrical = True diff --git a/src/players/migrations/0002_auto__del_field_playerattribute_db_mode.py b/src/players/migrations/0002_auto__del_field_playerattribute_db_mode.py index 3a30ec0bd8..e7872bf7ed 100644 --- a/src/players/migrations/0002_auto__del_field_playerattribute_db_mode.py +++ b/src/players/migrations/0002_auto__del_field_playerattribute_db_mode.py @@ -4,36 +4,431 @@ from south.db import db from south.v2 import SchemaMigration from django.db import models +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). 
+ """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +# overloading pickle to have it find the PackedDBobj in this module +import pickle + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +renametable = { + 'src.typeclasses.models': 'src.players.migrations.0018_convert_attrdata', + 'PackedDBobject': 'PackedDBobject', +} + +def mapname(name): + if name in renametable: + return renametable[name] + return name + +def mapped_load_global(self): + module = mapname(self.readline()[:-1]) + name = mapname(self.readline()[:-1]) + klass = self.find_class(module, name) + self.append(klass) + +def loads(str): + file = StringIO(str) + unpickler = pickle.Unpickler(file) + unpickler.dispatch[pickle.GLOBAL] = mapped_load_global + return unpickler.load() + + + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. 
+ + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class PackedList(list): + """ + Attribute helper class. + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. 
+ + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." + super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. 
+ db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." 
+ super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." + super(PackedSet, self).update(*args, **kwargs) + self.save() + +def to_attr(data): + """ + Convert data to proper attr data format before saving + + We have to make sure to not store database objects raw, since + this will crash the system. Instead we must store their IDs + and make sure to convert back when the attribute is read back + later. + + Due to this it's criticial that we check all iterables + recursively, converting all found database objects to a form + the database can handle. We handle lists, tuples and dicts + (and any nested combination of them) this way, all other + iterables are stored and returned as lists. + + data storage format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string or number + dbobj - a single dbobj + iter - any iterable object - will be looped over recursively + to convert dbobj->id. + + """ + + def iter_db2id(item): + """ + recursively looping through stored iterables, replacing objects with ids. 
+ (Python only builds nested functions once, so there is no overhead for nesting) + """ + dtype = type(item) + if dtype in (basestring, int, float): # check the most common types first, for speed + return item + elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): + db_model_name = item.db_model_name + if db_model_name == "typeclass": + db_model_name = GA(item.dbobj, "db_model_name") + return PackedDBobject(item.id, db_model_name, item.db_key) + elif dtype == tuple: + return tuple(iter_db2id(val) for val in item) + elif dtype in (dict, PackedDict): + return dict((key, iter_db2id(val)) for key, val in item.items()) + elif hasattr(item, '__iter__'): + return list(iter_db2id(val) for val in item) + else: + return item + + dtype = type(data) + + if dtype in (basestring, int, float): + return ("simple",data) + elif hasattr(data, "id") and hasattr(data, "db_model_name") and hasattr(data, 'db_key'): + # all django models (objectdb,scriptdb,playerdb,channel,msg,typeclass) + # have the protected property db_model_name hardcoded on themselves for speed. + db_model_name = data.db_model_name + if db_model_name == "typeclass": + # typeclass cannot help us, we want the actual child object model name + db_model_name = GA(data.dbobj, "db_model_name") + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): + return ("iter", iter_db2id(data)) + else: + return ("simple", data) + +# modified for migration - converts to plain python properties +def from_attr(datatuple): + """ + Retrieve data from a previously stored attribute. This + is always a dict with keys type and data. + + datatuple comes from the database storage and has + the following format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string. is returned as-is. + dbobj - a single dbobj-id. This id is retrieved back from the database. + iter - an iterable. 
This is traversed iteratively, converting all found + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to + allow in-place assignment such as obj.db.mylist[3] = val. Mylist + is then a PackedList that saves the data on the fly. + """ + # nested functions + def id2db(data): + """ + Convert db-stored dbref back to object + """ + mclass = CTYPEGET(model=data.db_model).model_class() + try: + return mclass.objects.get(id=data.id) + + except AttributeError: + try: + return mclass.objects.get(id=data.id) + except mclass.DoesNotExist: # could happen if object was deleted in the interim. + return None + + def iter_id2db(item): + """ + Recursively looping through stored iterables, replacing ids with actual objects. + We return PackedDict and PackedLists instead of normal lists; this is needed in order for + the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is + stored in the database are however always normal python primitives. 
+ """ + dtype = type(item) + if dtype in (basestring, int, float, long, bool): # check the most common types first, for speed + return item + elif dtype == PackedDBobject or hasattr(item, '__class__') and item.__class__.__name__ == "PackedDBobject": + return id2db(item) + elif dtype == tuple: + return tuple([iter_id2db(val) for val in item]) + elif dtype in (dict, PackedDict): + return dict(zip([key for key in item.keys()], + [iter_id2db(val) for val in item.values()])) + elif hasattr(item, '__iter__'): + return list(iter_id2db(val) for val in item) + else: + return item + + typ, data = datatuple + + if typ == 'simple': + # single non-db objects + return data + elif typ == 'dbobj': + # a single stored dbobj + return id2db(data) + elif typ == 'iter': + # all types of iterables + return iter_id2db(data) class Migration(SchemaMigration): def forwards(self, orm): - + # Deleting field 'PlayerAttribute.db_mode' - from src.players.models import PlayerAttribute - from src.typeclasses.models import PackedDBobject - for attr in PlayerAttribute.objects.all(): - # resave attributes - db_mode = attr.db_mode - if db_mode and db_mode != 'pickle': - # an object. We need to resave this. - if db_mode == 'object': - val = PackedDBobject(attr.db_value, "objectdb") - elif db_mode == 'player': - val = PackedDBobject(attr.db_value, "playerdb") - elif db_mode == 'script': - val = PackedDBobject(attr.db_value, "scriptdb") - elif db_mode == 'help': - val = PackedDBobject(attr.db_value, "helpentry") - else: - val = PackedDBobject(attr.db_value, db_mode) # channel, msg - attr.value = val + if not db.dry_run: + for attr in orm["players.PlayerAttribute"].objects.all(): + # resave attributes + db_mode = attr.db_mode + if db_mode and db_mode != 'pickle': + # an object. We need to resave this. 
+ if db_mode == 'object': + val = PackedDBobject(attr.db_value, "objectdb") + elif db_mode == 'player': + val = PackedDBobject(attr.db_value, "playerdb") + elif db_mode == 'script': + val = PackedDBobject(attr.db_value, "scriptdb") + elif db_mode == 'help': + val = PackedDBobject(attr.db_value, "helpentry") + else: + val = PackedDBobject(attr.db_value, db_mode) # channel, msg + val = to_attr(val) + attr.db_value = val + attr.save() - db.delete_column('players_playerattribute', 'db_mode') + db.delete_column('players_playerattribute', 'db_mode') def backwards(self, orm): - + # Adding field 'PlayerAttribute.db_mode' db.add_column('players_playerattribute', 'db_mode', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True), keep_default=False) diff --git a/src/players/migrations/0008_converting_attributes.py b/src/players/migrations/0008_converting_attributes.py index 5527892dfe..d7d91294fd 100644 --- a/src/players/migrations/0008_converting_attributes.py +++ b/src/players/migrations/0008_converting_attributes.py @@ -9,7 +9,7 @@ try: except ImportError: import pickle from src.utils.utils import to_str, to_unicode -from src.typeclasses.models import PackedDBobject,PackedDict,PackedList +#from src.typeclasses.models import PackedDBobject,PackedDict,PackedList from src.players.models import PlayerAttribute from django.contrib.contenttypes.models import ContentType @@ -18,6 +18,223 @@ GA = object.__getattribute__ SA = object.__setattr__ DA = object.__delattr__ +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). 
+ """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. + + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." 
+ if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class PackedList(list): + """ + Attribute helper class. + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." 
+ if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." + super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. + db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. 
once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." + super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." 
+ super(PackedSet, self).update(*args, **kwargs) + self.save() + def to_attr(data): """ Convert data to proper attr data format before saving @@ -33,13 +250,13 @@ def to_attr(data): (and any nested combination of them) this way, all other iterables are stored and returned as lists. - data storage format: + data storage format: (simple|dbobj|iter, ) - where + where simple - a single non-db object, like a string or number dbobj - a single dbobj iter - any iterable object - will be looped over recursively - to convert dbobj->id. + to convert dbobj->id. """ @@ -50,7 +267,7 @@ def to_attr(data): """ dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): db_model_name = item.db_model_name if db_model_name == "typeclass": @@ -76,8 +293,8 @@ def to_attr(data): if db_model_name == "typeclass": # typeclass cannot help us, we want the actual child object model name db_model_name = GA(data.dbobj, "db_model_name") - return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) - elif hasattr(data, "__iter__"): + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): return ("iter", iter_db2id(data)) else: return ("simple", data) @@ -85,21 +302,21 @@ def to_attr(data): def from_attr(attr, datatuple): """ Retrieve data from a previously stored attribute. This - is always a dict with keys type and data. + is always a dict with keys type and data. - datatuple comes from the database storage and has - the following format: + datatuple comes from the database storage and has + the following format: (simple|dbobj|iter, ) where simple - a single non-db object, like a string. is returned as-is. - dbobj - a single dbobj-id. This id is retrieved back from the database. + dbobj - a single dbobj-id. This id is retrieved back from the database. iter - an iterable. 
This is traversed iteratively, converting all found - dbobj-ids back to objects. Also, all lists and dictionaries are - returned as their PackedList/PackedDict counterparts in order to + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to allow in-place assignment such as obj.db.mylist[3] = val. Mylist - is then a PackedList that saves the data on the fly. + is then a PackedList that saves the data on the fly. """ - # nested functions + # nested functions def id2db(data): """ Convert db-stored dbref back to object @@ -112,39 +329,39 @@ def from_attr(attr, datatuple): try: return mclass.objects.get(id=data.id) except mclass.DoesNotExist: # could happen if object was deleted in the interim. - return None + return None def iter_id2db(item): """ Recursively looping through stored iterables, replacing ids with actual objects. We return PackedDict and PackedLists instead of normal lists; this is needed in order for the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is - stored in the database are however always normal python primitives. + stored in the database are however always normal python primitives. 
""" dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif dtype == PackedDBobject: return id2db(item) - elif dtype == tuple: + elif dtype == tuple: return tuple([iter_id2db(val) for val in item]) elif dtype in (dict, PackedDict): return PackedDict(attr, dict(zip([key for key in item.keys()], [iter_id2db(val) for val in item.values()]))) elif hasattr(item, '__iter__'): return PackedList(attr, list(iter_id2db(val) for val in item)) - else: - return item + else: + return item typ, data = datatuple - if typ == 'simple': + if typ == 'simple': # single non-db objects return data - elif typ == 'dbobj': - # a single stored dbobj + elif typ == 'dbobj': + # a single stored dbobj return id2db(data) - elif typ == 'iter': + elif typ == 'iter': # all types of iterables return iter_id2db(data) @@ -165,8 +382,8 @@ class Migration(DataMigration): val = ("simple", val) attr.db_value = to_unicode(pickle.dumps(to_str(to_attr(from_attr(attr, val))))) attr.save() - except TypeError, RuntimeError: - pass + except TypeError, RuntimeError: + pass def backwards(self, orm): diff --git a/src/players/migrations/0009_converting_attributes.py b/src/players/migrations/0009_converting_attributes.py index d7b3651c4d..3d2ddd083d 100644 --- a/src/players/migrations/0009_converting_attributes.py +++ b/src/players/migrations/0009_converting_attributes.py @@ -9,7 +9,7 @@ try: except ImportError: import pickle from src.utils.utils import to_str, to_unicode -from src.typeclasses.models import PackedDBobject +#from src.typeclasses.models import PackedDBobject from src.players.models import PlayerAttribute from django.contrib.contenttypes.models import ContentType @@ -18,66 +18,81 @@ GA = object.__getattribute__ SA = object.__setattr__ DA = object.__delattr__ +class PackedDBobject(object): + """ + Attribute helper class. 
+ A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + class PackedDict(dict): """ Attribute helper class. - A variant of dict that stores itself to the database when - updating one of its keys. This is called and handled by - Attribute.validate_data(). + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). """ def __init__(self, db_obj, *args, **kwargs): """ Sets up the packing dict. The db_store variable is set by Attribute.validate_data() when returned in - order to allow custom updates to the dict. + order to allow custom updates to the dict. db_obj - the Attribute object storing this dict. 
- + """ self.db_obj = db_obj super(PackedDict, self).__init__(*args, **kwargs) def __str__(self): return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) - def __setitem__(self, *args, **kwargs): + def __setitem__(self, *args, **kwargs): "assign item to this dict" super(PackedDict, self).__setitem__(*args, **kwargs) self.db_obj.value = self - def clear(self, *args, **kwargs): + def clear(self, *args, **kwargs): "Custom clear" super(PackedDict, self).clear(*args, **kwargs) self.db_obj.value = self - def pop(self, *args, **kwargs): + def pop(self, *args, **kwargs): "Custom pop" super(PackedDict, self).pop(*args, **kwargs) self.db_obj.value = self - def popitem(self, *args, **kwargs): + def popitem(self, *args, **kwargs): "Custom popitem" super(PackedDict, self).popitem(*args, **kwargs) self.db_obj.value = self - def update(self, *args, **kwargs): + def update(self, *args, **kwargs): "Custom update" super(PackedDict, self).update(*args, **kwargs) self.db_obj.value = self - + class PackedList(list): """ Attribute helper class. - A variant of list that stores itself to the database when - updating one of its keys. This is called and handled by - Attribute.validate_data(). + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). """ def __init__(self, db_obj, *args, **kwargs): """ - Sets up the packing list. + Sets up the packing list. db_obj - the Attribute object storing this dict. """ self.db_obj = db_obj super(PackedList, self).__init__(*args, **kwargs) def __str__(self): return "[%s]" % ", ".join(str(val) for val in self) - def __setitem__(self, *args, **kwargs): + def __setitem__(self, *args, **kwargs): "Custom setitem that stores changed list to database." 
- super(PackedList, self).__setitem__(*args, **kwargs) + super(PackedList, self).__setitem__(*args, **kwargs) self.db_obj.value = self def append(self, *args, **kwargs): "Custom append" @@ -124,13 +139,13 @@ def to_attr(data): (and any nested combination of them) this way, all other iterables are stored and returned as lists. - data storage format: + data storage format: (simple|dbobj|iter, ) - where + where simple - a single non-db object, like a string or number dbobj - a single dbobj iter - any iterable object - will be looped over recursively - to convert dbobj->id. + to convert dbobj->id. """ @@ -141,7 +156,7 @@ def to_attr(data): """ dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): db_model_name = item.db_model_name if db_model_name == "typeclass": @@ -167,8 +182,8 @@ def to_attr(data): if db_model_name == "typeclass": # typeclass cannot help us, we want the actual child object model name db_model_name = GA(data.dbobj, "db_model_name") - return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) - elif hasattr(data, "__iter__"): + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): return ("iter", iter_db2id(data)) else: return ("simple", data) @@ -176,21 +191,21 @@ def to_attr(data): def from_attr(attr, datatuple): """ Retrieve data from a previously stored attribute. This - is always a dict with keys type and data. + is always a dict with keys type and data. - datatuple comes from the database storage and has - the following format: + datatuple comes from the database storage and has + the following format: (simple|dbobj|iter, ) where simple - a single non-db object, like a string. is returned as-is. - dbobj - a single dbobj-id. This id is retrieved back from the database. + dbobj - a single dbobj-id. 
This id is retrieved back from the database. iter - an iterable. This is traversed iteratively, converting all found - dbobj-ids back to objects. Also, all lists and dictionaries are - returned as their PackedList/PackedDict counterparts in order to + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to allow in-place assignment such as obj.db.mylist[3] = val. Mylist - is then a PackedList that saves the data on the fly. + is then a PackedList that saves the data on the fly. """ - # nested functions + # nested functions def id2db(data): """ Convert db-stored dbref back to object @@ -203,39 +218,39 @@ def from_attr(attr, datatuple): try: return mclass.objects.get(id=data.id) except mclass.DoesNotExist: # could happen if object was deleted in the interim. - return None + return None def iter_id2db(item): """ Recursively looping through stored iterables, replacing ids with actual objects. We return PackedDict and PackedLists instead of normal lists; this is needed in order for the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is - stored in the database are however always normal python primitives. + stored in the database are however always normal python primitives. 
""" dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif dtype == PackedDBobject: return id2db(item) - elif dtype == tuple: + elif dtype == tuple: return tuple([iter_id2db(val) for val in item]) elif dtype in (dict, PackedDict): return dict(zip([key for key in item.keys()], [iter_id2db(val) for val in item.values()])) elif hasattr(item, '__iter__'): return list(iter_id2db(val) for val in item) - else: - return item + else: + return item typ, data = datatuple - if typ == 'simple': + if typ == 'simple': # single non-db objects return data - elif typ == 'dbobj': - # a single stored dbobj + elif typ == 'dbobj': + # a single stored dbobj return id2db(data) - elif typ == 'iter': + elif typ == 'iter': # all types of iterables return iter_id2db(data) @@ -250,8 +265,8 @@ class Migration(DataMigration): val = pickle.loads(to_str(attr.db_value)) attr.db_value = to_unicode(pickle.dumps(to_str(to_attr(from_attr(attr, val))))) attr.save() - except TypeError, RuntimeError: - pass + except TypeError, RuntimeError: + pass def backwards(self, orm): diff --git a/src/players/migrations/0014_add_attr__playable_characters.py b/src/players/migrations/0014_add_attr__playable_characters.py index 604f4b45d4..fe54719545 100644 --- a/src/players/migrations/0014_add_attr__playable_characters.py +++ b/src/players/migrations/0014_add_attr__playable_characters.py @@ -3,7 +3,23 @@ import datetime, pickle from south.db import db from south.v2 import DataMigration from django.db import models -from src.typeclasses.models import PackedDBobject +#from src.typeclasses.models import PackedDBobject + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). 
+ """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + class Migration(DataMigration): diff --git a/src/players/migrations/0018_convert_attrdata.py b/src/players/migrations/0018_convert_attrdata.py new file mode 100644 index 0000000000..3e928b4df4 --- /dev/null +++ b/src/players/migrations/0018_convert_attrdata.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models + +from django.contrib.contenttypes.models import ContentType + +from src.utils.utils import to_str +from src.utils.dbserialize import to_pickle +try: + import cPickle as pickle +except ImportError: + import pickle +CTYPEGET = ContentType.objects.get +GA = object.__getattribute__ +SA = object.__setattr__ +DA = object.__delattr__ + + +# overloading pickle to have it find the PackedDBobj in this module +import pickle + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +renametable = { + 'src.typeclasses.models': 'src.players.migrations.0018_convert_attrdata', + 'PackedDBobject': 'PackedDBobject', +} + +def mapname(name): + if name in renametable: + return renametable[name] + return name + +def mapped_load_global(self): + module = mapname(self.readline()[:-1]) + name = mapname(self.readline()[:-1]) + klass = self.find_class(module, name) + self.append(klass) + +def loads(str): + file = StringIO(str) + unpickler = pickle.Unpickler(file) + unpickler.dispatch[pickle.GLOBAL] = mapped_load_global + return unpickler.load() + + + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). 
+ """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. + + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." 
+ if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class PackedList(list): + """ + Attribute helper class. + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." 
+ if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." + super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. + db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. 
once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." + super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." 
+ super(PackedSet, self).update(*args, **kwargs) + self.save() + + + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." + + # modified for migration - converts to plain python properties + def from_attr(datatuple): + """ + Retrieve data from a previously stored attribute. This + is always a dict with keys type and data. + + datatuple comes from the database storage and has + the following format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string. is returned as-is. + dbobj - a single dbobj-id. This id is retrieved back from the database. + iter - an iterable. This is traversed iteratively, converting all found + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to + allow in-place assignment such as obj.db.mylist[3] = val. Mylist + is then a PackedList that saves the data on the fly. + """ + # nested functions + def id2db(data): + """ + Convert db-stored dbref back to object + """ + mclass = orm[data.db_model].model_class() + #mclass = CTYPEGET(model=data.db_model).model_class() + try: + return mclass.objects.get(id=data.id) + + except AttributeError: + try: + return mclass.objects.get(id=data.id) + except mclass.DoesNotExist: # could happen if object was deleted in the interim. + return None + + def iter_id2db(item): + """ + Recursively looping through stored iterables, replacing ids with actual objects. + We return PackedDict and PackedLists instead of normal lists; this is needed in order for + the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is + stored in the database are however always normal python primitives. 
+ """ + dtype = type(item) + if dtype in (basestring, int, float, long, bool): # check the most common types first, for speed + return item + elif dtype == PackedDBobject or hasattr(item, '__class__') and item.__class__.__name__ == "PackedDBobject": + return id2db(item) + elif dtype == tuple: + return tuple([iter_id2db(val) for val in item]) + elif dtype in (dict, PackedDict): + return dict(zip([key for key in item.keys()], + [iter_id2db(val) for val in item.values()])) + elif hasattr(item, '__iter__'): + return list(iter_id2db(val) for val in item) + else: + return item + + typ, data = datatuple + + if typ == 'simple': + # single non-db objects + return data + elif typ == 'dbobj': + # a single stored dbobj + return id2db(data) + elif typ == 'iter': + # all types of iterables + return iter_id2db(data) + + if not db.dry_run: + for attr in orm['players.PlayerAttribute'].objects.all(): + # repack attr into new format and reimport + datatuple = loads(to_str(attr.db_value)) + python_data = from_attr(datatuple) + new_data = to_pickle(python_data) + attr.db_value2 = new_data # new pickleObjectField + attr.save() + + def backwards(self, orm): + "Write your backwards methods here." 
+ raise RuntimeError("This migration cannot be reversed.") + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 
'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'objects.objectdb': { + 'Meta': {'object_name': 'ObjectDB'}, + 'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}), + 'db_sessid': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': 
"'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'players.playerattribute': { + 'Meta': {'object_name': 'PlayerAttribute'}, + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']"}), + 'db_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'db_value2': ('src.utils.picklefield.PickledObjectField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'players.playerdb': { + 'Meta': {'object_name': 'PlayerDB'}, + 'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_is_connected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 
'True'})
+        },
+        'players.playernick': {
+            'Meta': {'unique_together': "(('db_nick', 'db_type', 'db_obj'),)", 'object_name': 'PlayerNick'},
+            'db_nick': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
+            'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']"}),
+            'db_real': ('django.db.models.fields.TextField', [], {}),
+            'db_type': ('django.db.models.fields.CharField', [], {'default': "'inputline'", 'max_length': '16', 'null': 'True', 'blank': 'True'}),
+            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
+        }
+    }
+
+    complete_apps = ['players']
+    symmetrical = True
diff --git a/src/scripts/migrations/0002_auto__del_field_scriptattribute_db_mode.py b/src/scripts/migrations/0002_auto__del_field_scriptattribute_db_mode.py
index 6777333c3f..495be061b8 100644
--- a/src/scripts/migrations/0002_auto__del_field_scriptattribute_db_mode.py
+++ b/src/scripts/migrations/0002_auto__del_field_scriptattribute_db_mode.py
@@ -4,36 +4,91 @@ from south.db import db
 from south.v2 import SchemaMigration
 from django.db import models

+from src.utils.dbserialize import to_pickle
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+GA = object.__getattribute__
+SA = object.__setattr__
+DA = object.__delattr__
+
+
+# overloading pickle to have it find the PackedDBobj in this module
+import pickle
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+renametable = {
+    'src.typeclasses.models': 'src.scripts.migrations.0002_auto__del_field_scriptattribute_db_mode',
+    'PackedDBobject': 'PackedDBobject',
+}
+
+def mapname(name):
+    if name in renametable:
+        return renametable[name]
+    return name
+
+def mapped_load_global(self):
+    module = mapname(self.readline()[:-1])
+    name = mapname(self.readline()[:-1])
+    klass = self.find_class(module, name)
+    self.append(klass)
+
+def loads(str):
+    file = StringIO(str)
+    unpickler = pickle.Unpickler(file)
+    
unpickler.dispatch[pickle.GLOBAL] = mapped_load_global + return unpickler.load() + + + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) class Migration(SchemaMigration): def forwards(self, orm): - + # Deleting field 'ScriptAttribute.db_mode' - from src.scripts.models import ScriptAttribute - from src.typeclasses.models import PackedDBobject - for attr in ScriptAttribute.objects.all(): - # resave attributes - db_mode = attr.db_mode - if db_mode and db_mode != 'pickle': - # an object. We need to resave this. - if db_mode == 'object': - val = PackedDBobject(attr.db_value, "objectdb") - elif db_mode == 'player': - val = PackedDBobject(attr.db_value, "playerdb") - elif db_mode == 'script': - val = PackedDBobject(attr.db_value, "scriptdb") - elif db_mode == 'help': - val = PackedDBobject(attr.db_value, "helpentry") - else: - val = PackedDBobject(attr.db_value, db_mode) # channel, msg - attr.value = val + if not db.dry_run: + for attr in orm["scripts.ScriptAttribute"].objects.all(): + # resave attributes + db_mode = attr.db_mode + if db_mode and db_mode != 'pickle': + # an object. We need to resave this. 
+ if db_mode == 'object': + val = PackedDBobject(attr.db_value, "objectdb") + elif db_mode == 'player': + val = PackedDBobject(attr.db_value, "playerdb") + elif db_mode == 'script': + val = PackedDBobject(attr.db_value, "scriptdb") + elif db_mode == 'help': + val = PackedDBobject(attr.db_value, "helpentry") + else: + val = PackedDBobject(attr.db_value, db_mode) # channel, msg + attr.value = val db.delete_column('scripts_scriptattribute', 'db_mode') def backwards(self, orm): - + # Adding field 'ScriptAttribute.db_mode' db.add_column('scripts_scriptattribute', 'db_mode', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True), keep_default=False) diff --git a/src/scripts/migrations/0005_converting_attributes.py b/src/scripts/migrations/0005_converting_attributes.py index 42c1bbb7ad..1f37695772 100644 --- a/src/scripts/migrations/0005_converting_attributes.py +++ b/src/scripts/migrations/0005_converting_attributes.py @@ -9,7 +9,7 @@ try: except ImportError: import pickle from src.utils.utils import to_str, to_unicode -from src.typeclasses.models import PackedDBobject,PackedDict,PackedList +#from src.typeclasses.models import PackedDBobject,PackedDict,PackedList from django.contrib.contenttypes.models import ContentType CTYPEGET = ContentType.objects.get @@ -17,6 +17,223 @@ GA = object.__getattribute__ SA = object.__setattr__ DA = object.__delattr__ +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. + A variant of dict that stores itself to the database when + updating one of its keys. 
This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. + + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class PackedList(list): + """ + Attribute helper class. 
+ A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." 
+ super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. + db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." 
+ if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." + super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." + super(PackedSet, self).update(*args, **kwargs) + self.save() + def to_attr(data): """ Convert data to proper attr data format before saving @@ -32,13 +249,13 @@ def to_attr(data): (and any nested combination of them) this way, all other iterables are stored and returned as lists. 
- data storage format: + data storage format: (simple|dbobj|iter, ) - where + where simple - a single non-db object, like a string or number dbobj - a single dbobj iter - any iterable object - will be looped over recursively - to convert dbobj->id. + to convert dbobj->id. """ @@ -49,7 +266,7 @@ def to_attr(data): """ dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): db_model_name = item.db_model_name if db_model_name == "typeclass": @@ -75,8 +292,8 @@ def to_attr(data): if db_model_name == "typeclass": # typeclass cannot help us, we want the actual child object model name db_model_name = GA(data.dbobj, "db_model_name") - return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) - elif hasattr(data, "__iter__"): + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): return ("iter", iter_db2id(data)) else: return ("simple", data) @@ -84,21 +301,21 @@ def to_attr(data): def from_attr(attr, datatuple): """ Retrieve data from a previously stored attribute. This - is always a dict with keys type and data. + is always a dict with keys type and data. - datatuple comes from the database storage and has - the following format: + datatuple comes from the database storage and has + the following format: (simple|dbobj|iter, ) where simple - a single non-db object, like a string. is returned as-is. - dbobj - a single dbobj-id. This id is retrieved back from the database. + dbobj - a single dbobj-id. This id is retrieved back from the database. iter - an iterable. This is traversed iteratively, converting all found - dbobj-ids back to objects. Also, all lists and dictionaries are - returned as their PackedList/PackedDict counterparts in order to + dbobj-ids back to objects. 
Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to allow in-place assignment such as obj.db.mylist[3] = val. Mylist - is then a PackedList that saves the data on the fly. + is then a PackedList that saves the data on the fly. """ - # nested functions + # nested functions def id2db(data): """ Convert db-stored dbref back to object @@ -111,39 +328,39 @@ def from_attr(attr, datatuple): try: return mclass.objects.get(id=data.id) except mclass.DoesNotExist: # could happen if object was deleted in the interim. - return None + return None def iter_id2db(item): """ Recursively looping through stored iterables, replacing ids with actual objects. We return PackedDict and PackedLists instead of normal lists; this is needed in order for the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is - stored in the database are however always normal python primitives. + stored in the database are however always normal python primitives. 
""" dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif dtype == PackedDBobject: return id2db(item) - elif dtype == tuple: + elif dtype == tuple: return tuple([iter_id2db(val) for val in item]) elif dtype in (dict, PackedDict): return PackedDict(attr, dict(zip([key for key in item.keys()], [iter_id2db(val) for val in item.values()]))) elif hasattr(item, '__iter__'): return PackedList(attr, list(iter_id2db(val) for val in item)) - else: - return item + else: + return item typ, data = datatuple - if typ == 'simple': + if typ == 'simple': # single non-db objects return data - elif typ == 'dbobj': - # a single stored dbobj + elif typ == 'dbobj': + # a single stored dbobj return id2db(data) - elif typ == 'iter': + elif typ == 'iter': # all types of iterables return iter_id2db(data) diff --git a/src/scripts/migrations/0006_converting_attributes.py b/src/scripts/migrations/0006_converting_attributes.py index b1e56d5b94..f236af33f2 100644 --- a/src/scripts/migrations/0006_converting_attributes.py +++ b/src/scripts/migrations/0006_converting_attributes.py @@ -9,7 +9,7 @@ try: except ImportError: import pickle from src.utils.utils import to_str, to_unicode -from src.typeclasses.models import PackedDBobject +#from src.typeclasses.models import PackedDBobject from django.contrib.contenttypes.models import ContentType CTYPEGET = ContentType.objects.get @@ -17,66 +17,82 @@ GA = object.__getattribute__ SA = object.__setattr__ DA = object.__delattr__ +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). 
+ """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + + class PackedDict(dict): """ Attribute helper class. - A variant of dict that stores itself to the database when - updating one of its keys. This is called and handled by - Attribute.validate_data(). + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). """ def __init__(self, db_obj, *args, **kwargs): """ Sets up the packing dict. The db_store variable is set by Attribute.validate_data() when returned in - order to allow custom updates to the dict. + order to allow custom updates to the dict. db_obj - the Attribute object storing this dict. - + """ self.db_obj = db_obj super(PackedDict, self).__init__(*args, **kwargs) def __str__(self): return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) - def __setitem__(self, *args, **kwargs): + def __setitem__(self, *args, **kwargs): "assign item to this dict" super(PackedDict, self).__setitem__(*args, **kwargs) self.db_obj.value = self - def clear(self, *args, **kwargs): + def clear(self, *args, **kwargs): "Custom clear" super(PackedDict, self).clear(*args, **kwargs) self.db_obj.value = self - def pop(self, *args, **kwargs): + def pop(self, *args, **kwargs): "Custom pop" super(PackedDict, self).pop(*args, **kwargs) self.db_obj.value = self - def popitem(self, *args, **kwargs): + def popitem(self, *args, **kwargs): "Custom popitem" super(PackedDict, self).popitem(*args, **kwargs) self.db_obj.value = self - def update(self, *args, **kwargs): + def update(self, *args, **kwargs): "Custom update" super(PackedDict, self).update(*args, **kwargs) self.db_obj.value = self - + class PackedList(list): """ Attribute helper class. 
- A variant of list that stores itself to the database when - updating one of its keys. This is called and handled by - Attribute.validate_data(). + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). """ def __init__(self, db_obj, *args, **kwargs): """ - Sets up the packing list. + Sets up the packing list. db_obj - the Attribute object storing this dict. """ self.db_obj = db_obj super(PackedList, self).__init__(*args, **kwargs) def __str__(self): return "[%s]" % ", ".join(str(val) for val in self) - def __setitem__(self, *args, **kwargs): + def __setitem__(self, *args, **kwargs): "Custom setitem that stores changed list to database." - super(PackedList, self).__setitem__(*args, **kwargs) + super(PackedList, self).__setitem__(*args, **kwargs) self.db_obj.value = self def append(self, *args, **kwargs): "Custom append" @@ -123,13 +139,13 @@ def to_attr(data): (and any nested combination of them) this way, all other iterables are stored and returned as lists. - data storage format: + data storage format: (simple|dbobj|iter, ) - where + where simple - a single non-db object, like a string or number dbobj - a single dbobj iter - any iterable object - will be looped over recursively - to convert dbobj->id. + to convert dbobj->id. 
""" @@ -140,7 +156,7 @@ def to_attr(data): """ dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): db_model_name = item.db_model_name if db_model_name == "typeclass": @@ -166,8 +182,8 @@ def to_attr(data): if db_model_name == "typeclass": # typeclass cannot help us, we want the actual child object model name db_model_name = GA(data.dbobj, "db_model_name") - return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) - elif hasattr(data, "__iter__"): + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): return ("iter", iter_db2id(data)) else: return ("simple", data) @@ -175,21 +191,21 @@ def to_attr(data): def from_attr(attr, datatuple): """ Retrieve data from a previously stored attribute. This - is always a dict with keys type and data. + is always a dict with keys type and data. - datatuple comes from the database storage and has - the following format: + datatuple comes from the database storage and has + the following format: (simple|dbobj|iter, ) where simple - a single non-db object, like a string. is returned as-is. - dbobj - a single dbobj-id. This id is retrieved back from the database. + dbobj - a single dbobj-id. This id is retrieved back from the database. iter - an iterable. This is traversed iteratively, converting all found - dbobj-ids back to objects. Also, all lists and dictionaries are - returned as their PackedList/PackedDict counterparts in order to + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to allow in-place assignment such as obj.db.mylist[3] = val. Mylist - is then a PackedList that saves the data on the fly. + is then a PackedList that saves the data on the fly. 
""" - # nested functions + # nested functions def id2db(data): """ Convert db-stored dbref back to object @@ -202,39 +218,39 @@ def from_attr(attr, datatuple): try: return mclass.objects.get(id=data.id) except mclass.DoesNotExist: # could happen if object was deleted in the interim. - return None + return None def iter_id2db(item): """ Recursively looping through stored iterables, replacing ids with actual objects. We return PackedDict and PackedLists instead of normal lists; this is needed in order for the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is - stored in the database are however always normal python primitives. + stored in the database are however always normal python primitives. """ dtype = type(item) if dtype in (basestring, int, float): # check the most common types first, for speed - return item + return item elif dtype == PackedDBobject: return id2db(item) - elif dtype == tuple: + elif dtype == tuple: return tuple([iter_id2db(val) for val in item]) elif dtype in (dict, PackedDict): return attr, dict(zip([key for key in item.keys()], [iter_id2db(val) for val in item.values()])) elif hasattr(item, '__iter__'): return list(iter_id2db(val) for val in item) - else: - return item + else: + return item typ, data = datatuple - if typ == 'simple': + if typ == 'simple': # single non-db objects return data - elif typ == 'dbobj': - # a single stored dbobj + elif typ == 'dbobj': + # a single stored dbobj return id2db(data) - elif typ == 'iter': + elif typ == 'iter': # all types of iterables return iter_id2db(data) diff --git a/src/scripts/migrations/0011_convert_attrdata.py b/src/scripts/migrations/0011_convert_attrdata.py new file mode 100644 index 0000000000..f0208320a9 --- /dev/null +++ b/src/scripts/migrations/0011_convert_attrdata.py @@ -0,0 +1,515 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models + +from 
django.contrib.contenttypes.models import ContentType + +from src.utils.utils import to_str +from src.utils.dbserialize import to_pickle +try: + import cPickle as pickle +except ImportError: + import pickle +CTYPEGET = ContentType.objects.get +GA = object.__getattribute__ +SA = object.__setattr__ +DA = object.__delattr__ + + +# overloading pickle to have it find the PackedDBobj in this module +import pickle + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +renametable = { + 'src.typeclasses.models': 'src.scripts.migrations.0011_convert_attrdata', + 'PackedDBobject': 'PackedDBobject', +} + +def mapname(name): + if name in renametable: + return renametable[name] + return name + +def mapped_load_global(self): + module = mapname(self.readline()[:-1]) + name = mapname(self.readline()[:-1]) + klass = self.find_class(module, name) + self.append(klass) + +def loads(str): + file = StringIO(str) + unpickler = pickle.Unpickler(file) + unpickler.dispatch[pickle.GLOBAL] = mapped_load_global + return unpickler.load() + + + +class PackedDBobject(object): + """ + Attribute helper class. + A container for storing and easily identifying database objects in + the database (which doesn't suppport storing db_objects directly). + """ + def __init__(self, ID, db_model, db_key): + self.id = ID + self.db_model = db_model + self.key = db_key + def __str__(self): + return "%s(#%s)" % (self.key, self.id) + def __unicode__(self): + return u"%s(#%s)" % (self.key, self.id) + +class PackedDict(dict): + """ + Attribute helper class. + A variant of dict that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + Sets up the packing dict. The db_store variable + is set by Attribute.validate_data() when returned in + order to allow custom updates to the dict. + + db_obj - the Attribute object storing this dict. 
+ + The 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. Once initialization + is over, the Attribute from_attr() method will assign + the parent (or None, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedDict, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) + def save(self): + "Relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "assign item to this dict" + super(PackedDict, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[key]" + super(PackedDict, self).__delitem__(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Custom clear" + super(PackedDict, self).clear(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedDict, self).pop(*args, **kwargs) + self.save() + return ret + def popitem(self, *args, **kwargs): + "Custom popitem" + ret = super(PackedDict, self).popitem(*args, **kwargs) + self.save() + return ret + def setdefault(self, *args, **kwargs): + "Custom setdefault" + super(PackedDict, self).setdefault(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Custom update" + super(PackedDict, self).update(*args, **kwargs) + self.save() + +class PackedList(list): + """ + Attribute helper class. + A variant of list that stores itself to the database when + updating one of its keys. This is called and handled by + Attribute.validate_data(). + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing list. + db_obj - the attribute object storing this list. 
+ + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedList, self).__init__(*args, **kwargs) + def __str__(self): + return "[%s]" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def __setitem__(self, *args, **kwargs): + "Custom setitem that stores changed list to database." + super(PackedList, self).__setitem__(*args, **kwargs) + self.save() + def __delitem__(self, *args, **kwargs): + "delete with del self[index]" + super(PackedList, self).__delitem__(*args, **kwargs) + self.save() + def append(self, *args, **kwargs): + "Custom append" + super(PackedList, self).append(*args, **kwargs) + self.save() + def extend(self, *args, **kwargs): + "Custom extend" + super(PackedList, self).extend(*args, **kwargs) + self.save() + def insert(self, *args, **kwargs): + "Custom insert" + super(PackedList, self).insert(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Custom remove" + super(PackedList, self).remove(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Custom pop" + ret = super(PackedList, self).pop(*args, **kwargs) + self.save() + return ret + def reverse(self, *args, **kwargs): + "Custom reverse" + super(PackedList, self).reverse(*args, **kwargs) + self.save() + def sort(self, *args, **kwargs): + "Custom sort" + super(PackedList, self).sort(*args, **kwargs) + self.save() + +class PackedSet(set): + """ + A variant of Set that stores new updates to the databse. + """ + def __init__(self, db_obj, *args, **kwargs): + """ + sets up the packing set. 
+ db_obj - the attribute object storing this set + + the 'parent' property is set to 'init' at creation, + this stops the system from saving itself over and over + when first assigning the dict. once initialization + is over, the attribute from_attr() method will assign + the parent (or none, if at the root) + + """ + self.db_obj = db_obj + self.parent = 'init' + super(PackedSet, self).__init__(*args, **kwargs) + def __str__(self): + return "{%s}" % ", ".join(str(val) for val in self) + def save(self): + "relay save operation upwards in tree until we hit the root." + if self.parent == 'init': + pass + elif self.parent: + self.parent.save() + else: + self.db_obj.value = self + def add(self, *args, **kwargs): + "Add an element to the set" + super(PackedSet, self).add(*args, **kwargs) + self.save() + def clear(self, *args, **kwargs): + "Remove all elements from this set" + super(PackedSet, self).clear(*args, **kwargs) + self.save() + def difference_update(self, *args, **kwargs): + "Remove all elements of another set from this set." + super(PackedSet, self).difference_update(*args, **kwargs) + self.save() + def discard(self, *args, **kwargs): + "Remove an element from a set if it is a member.\nIf not a member, do nothing." + super(PackedSet, self).discard(*args, **kwargs) + self.save() + def intersection_update(self, *args, **kwargs): + "Update a set with the intersection of itself and another." + super(PackedSet, self).intersection_update(*args, **kwargs) + self.save() + def pop(self, *args, **kwargs): + "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." + super(PackedSet, self).pop(*args, **kwargs) + self.save() + def remove(self, *args, **kwargs): + "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." 
+ super(PackedSet, self).remove(*args, **kwargs) + self.save() + def symmetric_difference_update(self, *args, **kwargs): + "Update a set with the symmetric difference of itself and another." + super(PackedSet, self).symmetric_difference_update(*args, **kwargs) + self.save() + def update(self, *args, **kwargs): + "Update a set with the union of itself and others." + super(PackedSet, self).update(*args, **kwargs) + self.save() + +def to_attr(data): + """ + Convert data to proper attr data format before saving + + We have to make sure to not store database objects raw, since + this will crash the system. Instead we must store their IDs + and make sure to convert back when the attribute is read back + later. + + Due to this it's criticial that we check all iterables + recursively, converting all found database objects to a form + the database can handle. We handle lists, tuples and dicts + (and any nested combination of them) this way, all other + iterables are stored and returned as lists. + + data storage format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string or number + dbobj - a single dbobj + iter - any iterable object - will be looped over recursively + to convert dbobj->id. + + """ + + def iter_db2id(item): + """ + recursively looping through stored iterables, replacing objects with ids. 
+ (Python only builds nested functions once, so there is no overhead for nesting) + """ + dtype = type(item) + if dtype in (basestring, int, float): # check the most common types first, for speed + return item + elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"): + db_model_name = item.db_model_name + if db_model_name == "typeclass": + db_model_name = GA(item.dbobj, "db_model_name") + return PackedDBobject(item.id, db_model_name, item.db_key) + elif dtype == tuple: + return tuple(iter_db2id(val) for val in item) + elif dtype in (dict, PackedDict): + return dict((key, iter_db2id(val)) for key, val in item.items()) + elif hasattr(item, '__iter__'): + return list(iter_db2id(val) for val in item) + else: + return item + + dtype = type(data) + + if dtype in (basestring, int, float): + return ("simple",data) + elif hasattr(data, "id") and hasattr(data, "db_model_name") and hasattr(data, 'db_key'): + # all django models (objectdb,scriptdb,playerdb,channel,msg,typeclass) + # have the protected property db_model_name hardcoded on themselves for speed. + db_model_name = data.db_model_name + if db_model_name == "typeclass": + # typeclass cannot help us, we want the actual child object model name + db_model_name = GA(data.dbobj, "db_model_name") + return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key)) + elif hasattr(data, "__iter__"): + return ("iter", iter_db2id(data)) + else: + return ("simple", data) + + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." + + # modified for migration - converts to plain python properties + def from_attr(datatuple): + """ + Retrieve data from a previously stored attribute. This + is always a dict with keys type and data. 
+ + datatuple comes from the database storage and has + the following format: + (simple|dbobj|iter, ) + where + simple - a single non-db object, like a string. is returned as-is. + dbobj - a single dbobj-id. This id is retrieved back from the database. + iter - an iterable. This is traversed iteratively, converting all found + dbobj-ids back to objects. Also, all lists and dictionaries are + returned as their PackedList/PackedDict counterparts in order to + allow in-place assignment such as obj.db.mylist[3] = val. Mylist + is then a PackedList that saves the data on the fly. + """ + # nested functions + def id2db(data): + """ + Convert db-stored dbref back to object + """ + mclass = orm[data.db_model].model_class() + #mclass = CTYPEGET(model=data.db_model).model_class() + try: + return mclass.objects.get(id=data.id) + + except AttributeError: + try: + return mclass.objects.get(id=data.id) + except mclass.DoesNotExist: # could happen if object was deleted in the interim. + return None + + def iter_id2db(item): + """ + Recursively looping through stored iterables, replacing ids with actual objects. + We return PackedDict and PackedLists instead of normal lists; this is needed in order for + the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is + stored in the database are however always normal python primitives. 
+ """ + dtype = type(item) + if dtype in (basestring, int, float, long, bool): # check the most common types first, for speed + return item + elif dtype == PackedDBobject or hasattr(item, '__class__') and item.__class__.__name__ == "PackedDBobject": + return id2db(item) + elif dtype == tuple: + return tuple([iter_id2db(val) for val in item]) + elif dtype in (dict, PackedDict): + return dict(zip([key for key in item.keys()], + [iter_id2db(val) for val in item.values()])) + elif hasattr(item, '__iter__'): + return list(iter_id2db(val) for val in item) + else: + return item + + typ, data = datatuple + + if typ == 'simple': + # single non-db objects + return data + elif typ == 'dbobj': + # a single stored dbobj + return id2db(data) + elif typ == 'iter': + # all types of iterables + return iter_id2db(data) + + if not db.dry_run: + for attr in orm['scripts.ScriptAttribute'].objects.all(): + # repack attr into new format and reimport + datatuple = loads(to_str(attr.db_value)) + python_data = from_attr(datatuple) + new_data = to_pickle(python_data) + attr.db_value2 = new_data # new pickleObjectField + attr.save() + + def backwards(self, orm): + "Write your backwards methods here." 
+ raise RuntimeError("This migration cannot be reversed.") + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 
'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'objects.objectdb': { + 'Meta': {'object_name': 'ObjectDB'}, + 'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_player': ('django.db.models.fields.related.ForeignKey', 
[], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}), + 'db_sessid': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'players.playerdb': { + 'Meta': {'object_name': 'PlayerDB'}, + 'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_is_connected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}) + }, + 'scripts.scriptattribute': { + 'Meta': {'object_name': 'ScriptAttribute'}, + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scripts.ScriptDB']"}), + 'db_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'db_value2': ('src.utils.picklefield.PickledObjectField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) 
+ }, + 'scripts.scriptdb': { + 'Meta': {'object_name': 'ScriptDB'}, + 'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'db_desc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_interval': ('django.db.models.fields.IntegerField', [], {'default': '-1'}), + 'db_is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'db_lock_storage': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']", 'null': 'True', 'blank': 'True'}), + 'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'db_persistent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'db_repeats': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'db_start_delay': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + } + } + + complete_apps = ['scripts'] + symmetrical = True diff --git a/src/typeclasses/models.py b/src/typeclasses/models.py index 9f5a33fe7d..1feab9d78f 100644 --- a/src/typeclasses/models.py +++ b/src/typeclasses/models.py @@ -68,222 +68,6 @@ _DA = object.__delattr__ # #------------------------------------------------------------ -class PackedDBobject(object): - """ - Attribute helper class. - A container for storing and easily identifying database objects in - the database (which doesn't suppport storing db_objects directly). 
- """ - def __init__(self, ID, db_model, db_key): - self.id = ID - self.db_model = db_model - self.key = db_key - def __str__(self): - return "%s(#%s)" % (self.key, self.id) - def __unicode__(self): - return u"%s(#%s)" % (self.key, self.id) - -class PackedDict(dict): - """ - Attribute helper class. - A variant of dict that stores itself to the database when - updating one of its keys. This is called and handled by - Attribute.validate_data(). - """ - def __init__(self, db_obj, *args, **kwargs): - """ - Sets up the packing dict. The db_store variable - is set by Attribute.validate_data() when returned in - order to allow custom updates to the dict. - - db_obj - the Attribute object storing this dict. - - The 'parent' property is set to 'init' at creation, - this stops the system from saving itself over and over - when first assigning the dict. Once initialization - is over, the Attribute from_attr() method will assign - the parent (or None, if at the root) - - """ - self.db_obj = db_obj - self.parent = 'init' - super(PackedDict, self).__init__(*args, **kwargs) - def __str__(self): - return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items()) - def save(self): - "Relay save operation upwards in tree until we hit the root." 
- if self.parent == 'init': - pass - elif self.parent: - self.parent.save() - else: - self.db_obj.value = self - def __setitem__(self, *args, **kwargs): - "assign item to this dict" - super(PackedDict, self).__setitem__(*args, **kwargs) - self.save() - def __delitem__(self, *args, **kwargs): - "delete with del self[key]" - super(PackedDict, self).__delitem__(*args, **kwargs) - self.save() - def clear(self, *args, **kwargs): - "Custom clear" - super(PackedDict, self).clear(*args, **kwargs) - self.save() - def pop(self, *args, **kwargs): - "Custom pop" - ret = super(PackedDict, self).pop(*args, **kwargs) - self.save() - return ret - def popitem(self, *args, **kwargs): - "Custom popitem" - ret = super(PackedDict, self).popitem(*args, **kwargs) - self.save() - return ret - def setdefault(self, *args, **kwargs): - "Custom setdefault" - super(PackedDict, self).setdefault(*args, **kwargs) - self.save() - def update(self, *args, **kwargs): - "Custom update" - super(PackedDict, self).update(*args, **kwargs) - self.save() - -class PackedList(list): - """ - Attribute helper class. - A variant of list that stores itself to the database when - updating one of its keys. This is called and handled by - Attribute.validate_data(). - """ - def __init__(self, db_obj, *args, **kwargs): - """ - sets up the packing list. - db_obj - the attribute object storing this list. - - the 'parent' property is set to 'init' at creation, - this stops the system from saving itself over and over - when first assigning the dict. once initialization - is over, the attribute from_attr() method will assign - the parent (or none, if at the root) - - """ - self.db_obj = db_obj - self.parent = 'init' - super(PackedList, self).__init__(*args, **kwargs) - def __str__(self): - return "[%s]" % ", ".join(str(val) for val in self) - def save(self): - "relay save operation upwards in tree until we hit the root." 
- if self.parent == 'init': - pass - elif self.parent: - self.parent.save() - else: - self.db_obj.value = self - def __setitem__(self, *args, **kwargs): - "Custom setitem that stores changed list to database." - super(PackedList, self).__setitem__(*args, **kwargs) - self.save() - def __delitem__(self, *args, **kwargs): - "delete with del self[index]" - super(PackedList, self).__delitem__(*args, **kwargs) - self.save() - def append(self, *args, **kwargs): - "Custom append" - super(PackedList, self).append(*args, **kwargs) - self.save() - def extend(self, *args, **kwargs): - "Custom extend" - super(PackedList, self).extend(*args, **kwargs) - self.save() - def insert(self, *args, **kwargs): - "Custom insert" - super(PackedList, self).insert(*args, **kwargs) - self.save() - def remove(self, *args, **kwargs): - "Custom remove" - super(PackedList, self).remove(*args, **kwargs) - self.save() - def pop(self, *args, **kwargs): - "Custom pop" - ret = super(PackedList, self).pop(*args, **kwargs) - self.save() - return ret - def reverse(self, *args, **kwargs): - "Custom reverse" - super(PackedList, self).reverse(*args, **kwargs) - self.save() - def sort(self, *args, **kwargs): - "Custom sort" - super(PackedList, self).sort(*args, **kwargs) - self.save() - -class PackedSet(set): - """ - A variant of Set that stores new updates to the databse. - """ - def __init__(self, db_obj, *args, **kwargs): - """ - sets up the packing set. - db_obj - the attribute object storing this set - - the 'parent' property is set to 'init' at creation, - this stops the system from saving itself over and over - when first assigning the dict. 
once initialization - is over, the attribute from_attr() method will assign - the parent (or none, if at the root) - - """ - self.db_obj = db_obj - self.parent = 'init' - super(PackedSet, self).__init__(*args, **kwargs) - def __str__(self): - return "{%s}" % ", ".join(str(val) for val in self) - def save(self): - "relay save operation upwards in tree until we hit the root." - if self.parent == 'init': - pass - elif self.parent: - self.parent.save() - else: - self.db_obj.value = self - def add(self, *args, **kwargs): - "Add an element to the set" - super(PackedSet, self).add(*args, **kwargs) - self.save() - def clear(self, *args, **kwargs): - "Remove all elements from this set" - super(PackedSet, self).clear(*args, **kwargs) - self.save() - def difference_update(self, *args, **kwargs): - "Remove all elements of another set from this set." - super(PackedSet, self).difference_update(*args, **kwargs) - self.save() - def discard(self, *args, **kwargs): - "Remove an element from a set if it is a member.\nIf not a member, do nothing." - super(PackedSet, self).discard(*args, **kwargs) - self.save() - def intersection_update(self, *args, **kwargs): - "Update a set with the intersection of itself and another." - super(PackedSet, self).intersection_update(*args, **kwargs) - self.save() - def pop(self, *args, **kwargs): - "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty." - super(PackedSet, self).pop(*args, **kwargs) - self.save() - def remove(self, *args, **kwargs): - "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError." - super(PackedSet, self).remove(*args, **kwargs) - self.save() - def symmetric_difference_update(self, *args, **kwargs): - "Update a set with the symmetric difference of itself and another." - super(PackedSet, self).symmetric_difference_update(*args, **kwargs) - self.save() - def update(self, *args, **kwargs): - "Update a set with the union of itself and others." 
- super(PackedSet, self).update(*args, **kwargs) - self.save() class Attribute(SharedMemoryModel): """ diff --git a/src/utils/dbserialize.py b/src/utils/dbserialize.py index 091d2f3104..6ccc4e5a5a 100644 --- a/src/utils/dbserialize.py +++ b/src/utils/dbserialize.py @@ -84,7 +84,7 @@ class _SaverMutable(object): self._parent._save_tree() elif self._db_obj: self._db_obj.value = self - logger.log_err("_SaverMutable %s has no root Attribute to save to." % self) + logger.log_errmsg("_SaverMutable %s has no root Attribute to save to." % self) def _convert_mutables(self, data): "converts mutables to Saver* variants and assigns .parent property" def process_tree(item, parent): @@ -260,7 +260,7 @@ def from_pickle(data, db_obj=None): return item def process_tree(item, parent): - "Recursive processor, convertion and identification of data" + "Recursive processor, building a parent-tree from iterable data" dtype = type(item) if dtype in (basestring, int, long, float, bool): return item