new de/serialization logic

This commit is contained in:
Alessandro Ogier 2022-07-24 18:50:52 +02:00
parent 26d4ede5c6
commit c706d49c13
No known key found for this signature in database
GPG key ID: 39BC069A69E8D4F8
2 changed files with 115 additions and 55 deletions

View file

@@ -2,6 +2,7 @@
Unit tests for typeclass base system
"""
from django.test import override_settings
from evennia.typeclasses import attributes
from evennia.utils.test_resources import BaseEvenniaTest, EvenniaTestCase
@@ -26,13 +27,24 @@ class TestAttributes(BaseEvenniaTest):
self.obj1.db.testattr = value
self.assertEqual(self.obj1.db.testattr, value)
# "plain" subclasses
value = DictSubclass({"fo": "foo", "bar": "bar"})
self.obj1.db.testattr = value
self.assertEqual(self.obj1.db.testattr, value)
value = DictSubclass({"fo": "foo", "bar": "bar", "obj": self.obj2})
self.obj1.db.testattr = value
self.obj1.db.testattr["fo"] = "foo2"
value.update({"fo": "foo2"})
self.assertEqual(self.obj1.db.testattr, value)
self.assertEqual(self.obj1.attributes.get("testattr"), value)
# nested subclasses
value = DictSubclass({"nested": True, "deep": DictSubclass({"fo": "foo", "bar": "bar"})})
self.obj1.db.testattr = value
self.obj1.db.testattr["deep"]["fo"] = "nemo"
value["deep"].update({"fo": "nemo"})
self.assertEqual(self.obj1.db.testattr, value)
self.assertEqual(self.obj1.attributes.get("testattr"), value)
@override_settings(TYPECLASS_AGGRESSIVE_CACHE=False)
@patch("evennia.typeclasses.attributes._TYPECLASS_AGGRESSIVE_CACHE", False)
@@ -47,14 +59,25 @@ class TestAttributes(BaseEvenniaTest):
self.assertEqual(self.obj1.db.testattr, value)
self.assertFalse(self.obj1.attributes.backend._cache)
# "plain" subclasses
value = DictSubclass({"fo": "foo", "bar": "bar"})
self.obj1.db.dict_subclass = value
self.assertEqual(self.obj1.db.dict_subclass, value)
self.obj1.db.testattr = value
self.assertEqual(self.obj1.db.testattr, value)
self.obj1.db.testattr["fo"] = "foo2"
value.update({"fo": "foo2"})
self.assertEqual(self.obj1.db.testattr, value)
self.assertEqual(self.obj1.attributes.get("testattr"), value)
self.assertFalse(self.obj1.attributes.backend._cache)
value = DictSubclass({"fo": "foo", "bar": "bar", "obj": self.obj2})
self.obj1.db.dict_subclass = value
self.assertEqual(self.obj1.db.dict_subclass, value)
# nested subclasses
value = DictSubclass({"nested": True, "deep": DictSubclass({"fo": "foo", "bar": "bar"})})
self.obj1.db.testattr = value
self.obj1.db.testattr["deep"]["fo"] = "nemo"
value["deep"].update({"fo": "nemo"})
self.assertEqual(self.obj1.db.testattr, value)
self.assertEqual(self.obj1.attributes.get("testattr"), value)
self.assertFalse(self.obj1.attributes.backend._cache)
def test_weird_text_save(self):

View file

@@ -263,7 +263,7 @@ class _SaverList(_SaverMutable, MutableSequence):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._data = list()
self._data = kwargs.pop("_class", list)()
@_save
def __iadd__(self, otherlist):
@@ -307,7 +307,7 @@ class _SaverDict(_SaverMutable, MutableMapping):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._data = dict()
self._data = kwargs.pop("_class", dict)()
def has_key(self, key):
return key in self._data
@@ -644,20 +644,21 @@ def to_pickle(data):
# classmethods) and instances
pass
if hasattr(item, "items"):
# we try to conserve the iterable class, if not convert to dict
if hasattr(item, "__iter__"):
try:
return item.__class__(
(process_item(key), process_item(val)) for key, val in item.items()
)
except (AttributeError, TypeError):
return {process_item(key): process_item(val) for key, val in item.items()}
elif hasattr(item, "__iter__"):
# we try to conserve the iterable class, if not convert to list
try:
return item.__class__([process_item(val) for val in item])
except (AttributeError, TypeError):
return [process_item(val) for val in item]
# we try to conserve the iterable class, if not convert to dict
try:
return item.__class__(
(process_item(key), process_item(val)) for key, val in item.items()
)
except (AttributeError, TypeError):
return {process_item(key): process_item(val) for key, val in item.items()}
except:
# we try to conserve the iterable class, if not convert to list
try:
return item.__class__([process_item(val) for val in item])
except (AttributeError, TypeError):
return [process_item(val) for val in item]
elif hasattr(item, "sessid") and hasattr(item, "conn_time"):
return pack_session(item)
try:
@@ -720,21 +721,22 @@ def from_pickle(data, db_obj=None):
return OrderedDict((process_item(key), process_item(val)) for key, val in item.items())
elif dtype == deque:
return deque(process_item(val) for val in item)
elif hasattr(item, "items"):
# we try to conserve the iterable class, if not convert to dict
try:
return item.__class__(
(process_item(key), process_item(val)) for key, val in item.items()
)
except (AttributeError, TypeError):
return {process_item(key): process_item(val) for key, val in item.items()}
elif hasattr(item, "__iter__"):
try:
# we try to conserve the iterable class if
# it accepts an iterator
return item.__class__(process_item(val) for val in item)
except (AttributeError, TypeError):
return [process_item(val) for val in item]
# we try to conserve the iterable class, if not convert to dict
try:
return item.__class__(
(process_item(key), process_item(val)) for key, val in item.items()
)
except (AttributeError, TypeError):
return {process_item(key): process_item(val) for key, val in item.items()}
except:
try:
# we try to conserve the iterable class if
# it accepts an iterator
return item.__class__(process_item(val) for val in item)
except (AttributeError, TypeError):
return [process_item(val) for val in item]
if hasattr(item, "__deserialize_dbobjs__"):
# this allows the object to custom-deserialize any embedded dbobjs
@@ -794,27 +796,32 @@ def from_pickle(data, db_obj=None):
dat = _SaverDeque(_parent=parent)
dat._data.extend(process_item(val) for val in item)
return dat
elif hasattr(item, "items"):
# we try to conserve the iterable class, if not convert to dict
try:
return item.__class__(
(process_item(key), process_tree(val, parent)) for key, val in item.items()
)
except (AttributeError, TypeError):
dat = _SaverDict(_parent=parent)
dat._data.update(
(process_item(key), process_tree(val, dat)) for key, val in item.items()
)
return dat
elif hasattr(item, "__iter__"):
try:
# we try to conserve the iterable class if it
# accepts an iterator
return item.__class__(process_tree(val, parent) for val in item)
except (AttributeError, TypeError):
dat = _SaverList(_parent=parent)
dat._data.extend(process_tree(val, dat) for val in item)
return dat
# we try to conserve the iterable class, if not convert to dict
try:
dat = _SaverDict(_parent=parent, _class=item.__class__)
dat._data.update(
(process_item(key), process_tree(val, dat)) for key, val in item.items()
)
return dat
except (AttributeError, TypeError):
dat = _SaverDict(_parent=parent)
dat._data.update(
(process_item(key), process_tree(val, dat)) for key, val in item.items()
)
return dat
except:
try:
# we try to conserve the iterable class if it
# accepts an iterator
dat = _SaverList(_parent=parent, _class=item.__class__)
dat._data.extend(process_tree(val, dat) for val in item)
return dat
except (AttributeError, TypeError):
dat = _SaverList(_parent=parent)
dat._data.extend(process_tree(val, dat) for val in item)
return dat
if hasattr(item, "__deserialize_dbobjs__"):
try:
@@ -828,7 +835,9 @@ def from_pickle(data, db_obj=None):
# convert lists, dicts and sets to their Saved* counterparts. It
# is only relevant if the "root" is an iterable of the right type.
dtype = type(data)
if dtype == list:
if dtype in (str, int, float, bool, bytes, SafeString, tuple):
return process_item(data)
elif dtype == list:
dat = _SaverList(_db_obj=db_obj)
dat._data.extend(process_tree(val, dat) for val in data)
return dat
@@ -858,6 +867,34 @@ def from_pickle(data, db_obj=None):
dat = _SaverDeque(_db_obj=db_obj)
dat._data.extend(process_item(val) for val in data)
return dat
elif hasattr(data, "__iter__"):
try:
# we try to conserve the iterable class, if not convert to dict
try:
dat = _SaverDict(_db_obj=db_obj, _class=data.__class__)
dat._data.update(
(process_item(key), process_tree(val, dat)) for key, val in data.items()
)
return dat
except (AttributeError, TypeError):
dat = _SaverDict(_db_obj=db_obj)
dat._data.update(
(process_item(key), process_tree(val, dat)) for key, val in data.items()
)
return dat
except:
try:
# we try to conserve the iterable class if it
# accepts an iterator
dat = _SaverList(_db_obj=db_obj, _class=data.__class__)
dat._data.extend(process_tree(val, dat) for val in data)
return dat
except (AttributeError, TypeError):
dat = _SaverList(_db_obj=db_obj)
dat._data.extend(process_tree(val, dat) for val in data)
return dat
return process_item(data)