diff --git a/docs/0.9.5/.buildinfo b/docs/0.9.5/.buildinfo
index c0fdf6bcda..68fab0c48e 100644
--- a/docs/0.9.5/.buildinfo
+++ b/docs/0.9.5/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: b01f355158aa6c74c79cada7b3422d47
+config: 917ec9a29ad49f15a32e52a716a58d16
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/0.9.5/Default-Commands.html b/docs/0.9.5/Default-Commands.html
index 138ee2b978..ad6bea6c52 100644
--- a/docs/0.9.5/Default-Commands.html
+++ b/docs/0.9.5/Default-Commands.html
@@ -55,25 +55,25 @@ with
__unloggedin_look_command [l, look] (cmdset: UnloggedinCmdSet, help-category: General) about [version] (cmdset: CharacterCmdSet, help-category: System) access [groups, hierarchy] (cmdset: CharacterCmdSet, help-category: General) accounts [listaccounts, account] (cmdset: CharacterCmdSet, help-category: System) access [hierarchy, groups] (cmdset: CharacterCmdSet, help-category: General) accounts [account, listaccounts] (cmdset: CharacterCmdSet, help-category: System) addcom [chanalias, aliaschan] (cmdset: AccountCmdSet, help-category: Comms) alias [setobjalias] (cmdset: CharacterCmdSet, help-category: Building) allcom (cmdset: AccountCmdSet, help-category: Comms) batchcode [batchcodes] (cmdset: CharacterCmdSet, help-category: Building) batchcommands [batchcommand, batchcmd] (cmdset: CharacterCmdSet, help-category: Building) batchcommands [batchcmd, batchcommand] (cmdset: CharacterCmdSet, help-category: Building) cboot (cmdset: AccountCmdSet, help-category: Comms) ccreate [channelcreate] (cmdset: AccountCmdSet, help-category: Comms) cdesc (cmdset: AccountCmdSet, help-category: Comms) cdestroy (cmdset: AccountCmdSet, help-category: Comms) cemit [cmsg] (cmdset: AccountCmdSet, help-category: Comms) channels [channellist, comlist, clist, chanlist, all channels] (cmdset: AccountCmdSet, help-category: Comms) channels [clist, channellist, chanlist, all channels, comlist] (cmdset: AccountCmdSet, help-category: Comms) charcreate (cmdset: AccountCmdSet, help-category: General) chardelete (cmdset: AccountCmdSet, help-category: General) clock (cmdset: AccountCmdSet, help-category: Comms) cmdsets [listcmsets] (cmdset: CharacterCmdSet, help-category: Building) color (cmdset: AccountCmdSet, help-category: General) connect [conn, con, co] (cmdset: UnloggedinCmdSet, help-category: General) connect [con, conn, co] (cmdset: UnloggedinCmdSet, help-category: General) copy (cmdset: CharacterCmdSet, help-category: Building) cpattr (cmdset: CharacterCmdSet, help-category: Building) create 
(cmdset: CharacterCmdSet, help-category: Building)
encoding [encode] (cmdset: UnloggedinCmdSet, help-category: General)
examine [exam, ex] (cmdset: AccountCmdSet, help-category: Building)
find [search, locate] (cmdset: CharacterCmdSet, help-category: Building)
find [locate, search] (cmdset: CharacterCmdSet, help-category: Building)
get [grab] (cmdset: CharacterCmdSet, help-category: General)
give (cmdset: CharacterCmdSet, help-category: General)
grapevine2chan (cmdset: AccountCmdSet, help-category: Comms)
ic [puppet] (cmdset: AccountCmdSet, help-category: General)
info (cmdset: UnloggedinCmdSet, help-category: General)
inventory [inv, i] (cmdset: CharacterCmdSet, help-category: General)
inventory [i, inv] (cmdset: CharacterCmdSet, help-category: General)
irc2chan (cmdset: AccountCmdSet, help-category: Comms)
ircstatus (cmdset: AccountCmdSet, help-category: Comms)
link (cmdset: CharacterCmdSet, help-category: Building)
lock [locks] (cmdset: CharacterCmdSet, help-category: Building)
look [ls, l] (cmdset: AccountCmdSet, help-category: General)
look [ls, l] (cmdset: CharacterCmdSet, help-category: General)
look [l, ls] (cmdset: AccountCmdSet, help-category: General)
look [l, ls] (cmdset: CharacterCmdSet, help-category: General)
mvattr (cmdset: CharacterCmdSet, help-category: Building)
name [rename] (cmdset: CharacterCmdSet, help-category: Building)
nick [nickname, nicks] (cmdset: AccountCmdSet, help-category: General)
objects [listobjs, listobjects, db, stats] (cmdset: CharacterCmdSet, help-category: System)
objects [stats, db, listobjects, listobjs] (cmdset: CharacterCmdSet, help-category: System)
ooc [unpuppet] (cmdset: AccountCmdSet, help-category: General)
open (cmdset: CharacterCmdSet, help-category: Building)
option [options] (cmdset: AccountCmdSet, help-category: General)
quell [unquell] (cmdset: AccountCmdSet, help-category: General)
quit (cmdset: AccountCmdSet, help-category: General)
quit [qu, q] (cmdset: UnloggedinCmdSet, help-category: General)
quit [q, qu] (cmdset: UnloggedinCmdSet, help-category: General)
reload [restart] (cmdset: AccountCmdSet, help-category: System)
reset [reboot] (cmdset: AccountCmdSet, help-category: System)
rss2chan (cmdset: AccountCmdSet, help-category: Comms)
say [’, “] (cmdset: CharacterCmdSet, help-category: General)
say [”, ‘] (cmdset: CharacterCmdSet, help-category: General)
screenreader (cmdset: UnloggedinCmdSet, help-category: General)
script [addscript] (cmdset: CharacterCmdSet, help-category: Building)
scripts [listscripts, globalscript] (cmdset: CharacterCmdSet, help-category: System)
server [serverprocess, serverload] (cmdset: CharacterCmdSet, help-category: System)
server [serverload, serverprocess] (cmdset: CharacterCmdSet, help-category: System)
service [services] (cmdset: CharacterCmdSet, help-category: System)
sessions (cmdset: SessionCmdSet, help-category: General)
set (cmdset: CharacterCmdSet, help-category: Building)
time [uptime] (cmdset: CharacterCmdSet, help-category: System)
tunnel [tun] (cmdset: CharacterCmdSet, help-category: Building)
typeclass [type, update, parent, swap] (cmdset: CharacterCmdSet, help-category: Building)
typeclass [swap, update, type, parent] (cmdset: CharacterCmdSet, help-category: Building)
unlink (cmdset: CharacterCmdSet, help-category: Building)
whisper (cmdset: CharacterCmdSet, help-category: General)
who [doing] (cmdset: AccountCmdSet, help-category: General)
aliases = ['ls', 'l']¶aliases = ['l', 'ls']¶
aliases = ['batchcommand', 'batchcmd']¶aliases = ['batchcmd', 'batchcommand']¶
aliases = ['type', 'update', 'parent', 'swap']¶aliases = ['swap', 'update', 'type', 'parent']¶
aliases = ['search', 'locate']¶aliases = ['locate', 'search']¶
aliases = ['channellist', 'comlist', 'clist', 'chanlist', 'all channels']¶aliases = ['clist', 'channellist', 'chanlist', 'all channels', 'comlist']¶
aliases = ['ls', 'l']¶aliases = ['l', 'ls']¶
aliases = ['inv', 'i']¶aliases = ['i', 'inv']¶
aliases = ["'", '"']¶aliases = ['"', "'"]¶
aliases = ['groups', 'hierarchy']¶aliases = ['hierarchy', 'groups']¶
aliases = ['listobjs', 'listobjects', 'db', 'stats']¶aliases = ['stats', 'db', 'listobjects', 'listobjs']¶
aliases = ['serverprocess', 'serverload']¶aliases = ['serverload', 'serverprocess']¶
aliases = ['listaccounts', 'account']¶aliases = ['account', 'listaccounts']¶
aliases = ['conn', 'con', 'co']¶aliases = ['con', 'conn', 'co']¶
aliases = ['qu', 'q']¶aliases = ['q', 'qu']¶
aliases = ['offers', 'deal']¶aliases = ['deal', 'offers']¶
aliases = ['ls', 'l']¶aliases = ['l', 'ls']¶
aliases = ['inv', 'i']¶aliases = ['i', 'inv']¶
aliases = ['roll', '@dice']¶aliases = ['@dice', 'roll']¶
aliases = ['conn', 'con', 'co']¶aliases = ['con', 'conn', 'co']¶
aliases = ['qu', 'q']¶aliases = ['q', 'qu']¶
aliases = ['ls', 'l']¶aliases = ['l', 'ls']¶
aliases = ["'", '"']¶aliases = ['"', "'"]¶
aliases = ['press button', 'press', 'push']¶
aliases = ['open button', 'open']¶
aliases = ['l', 'get', 'feel', 'examine', 'listen', 'ex']¶
aliases = ['burn', 'light']¶aliases = ['light', 'burn']¶
aliases = ['move', 'push', 'shiftroot', 'pull']¶aliases = ['move', 'pull', 'push', 'shiftroot']¶
aliases = ['button', 'push button', 'press button']¶aliases = ['press button', 'push button', 'button']¶
aliases = ['defend', 'parry', 'fight', 'kill', 'thrust', 'chop', 'bash', 'stab', 'slash', 'hit', 'pierce']¶aliases = ['fight', 'thrust', 'stab', 'parry', 'defend', 'pierce', 'chop', 'hit', 'slash', 'kill', 'bash']¶
aliases = ['ls', 'l']¶aliases = ['l', 'ls']¶
aliases = ['search', 'fiddle', 'feel', 'feel around', 'l']¶aliases = ['l', 'search', 'feel around', 'fiddle', 'feel']¶
aliases = [':w', ':u', ':i', ':q!', ':DD', ':y', '::', ':::', ':f', ':wq', ':I', ':fi', ':r', ':UU', ':x', ':!', ':=', ':echo', ':', ':dd', ':A', ':<', ':fd', ':S', ':q', ':s', ':p', ':h', ':>', ':uu', ':j', ':dw']¶aliases = [':f', ':=', ':dw', ':I', ':q', ':A', ':uu', ':echo', ':y', ':h', ':>', '::', ':s', ':i', ':', ':<', ':wq', ':x', ':UU', ':q!', ':w', ':fd', ':!', ':dd', ':u', ':DD', ':p', ':fi', ':::', ':r', ':j', ':S']¶
aliases = ['abort', 'quit', 'q', 'next', 'e', 'top', 'back', 'b', 'n', 'a', 'end', 't']¶aliases = ['e', 'abort', 't', 'b', 'quit', 'a', 'n', 'end', 'next', 'back', 'q', 'top']¶
Have type/force default to update-mode rather than resetmode and add more verbose
warning when using reset mode.
Attribute storage support defaultdics (Hendher)
Add is_ooc lockfunc (meant for limiting commands at the OOC level)
Add ObjectParent mixin to default game folder template as an easy, ready-made +way to override features on all ObjectDB-inheriting objects easily.
ObjectAll in-game objects in Evennia, be it characters, chairs, monsters, rooms or hand grenades are represented by an Evennia Object. Objects form the core of Evennia and is probably what you’ll spend most time working with. Objects are Typeclassed entities.
+An Evennia Object is, by definition, a Python class that includes
+evennia.objects.objects.DefaultObject among its
+parents. Evennia defines several subclasses of DefaultObject:
evennia.objects.objects.DefaultCharacter - +the normal in-game Character, controlled by a player.
evennia.objects.objects.DefaultRoom - a location in the game world.
evennia.objects.objects.DefaultExit - an entity that (usually) sits +in a room and represents a one-way connection to another location.
You will usually not use the Default* parents themselves. In mygame/typeclasses/ there are
+convenient subclasses to use. They are empty, and thus identical to
+the defaults. Tweaking them is one of the main ways to customize you game!
mygame.typeclasses.objects.Object (inherits from DefaultObject)
mygame.typeclasses.characters.Character (inherits from DefaultCharacter)
mygame.typeclasses.rooms.Room (inherits from DefaultRoom)
mygame.typeclasses.exits.Exit (inherits from DefaultExit)
An Evennia Object is, per definition, a Python class that includes evennia.DefaultObject among its
-parents. In mygame/typeclasses/objects.py there is already a class Object that inherits from
-DefaultObject and that you can inherit from. You can put your new typeclass directly in that
-module or you could organize your code in some other way. Here we assume we make a new module
+
You can easily add your own in-game behavior by either modifying one of the typeclasses in +your game dir or by inheriting from them.
+You can put your new typeclass directly in the relevant parent
+module, or you could organize your code in some other way. Here we assume we make a new module
mygame/typeclasses/flowers.py:
# mygame/typeclasses/flowers.py
@@ -74,29 +93,61 @@ module or you could organize your code in some other way. Here we assume we make
self.db.desc = "This is a pretty rose with thorns."
You could save this in the mygame/typeclasses/objects.py (then you’d not need to import Object)
-or you can put it in a new module. Let’s say we do the latter, making a module
-typeclasses/flowers.py. Now you just need to point to the class Rose with the @create command
+
Now you just need to point to the class Rose with the create command
to make a new rose:
@create/drop MyRose:flowers.Rose
What the @create command actually does is to use evennia.create_object. You can do the same
-thing yourself in code:
What the create command actually does is to use the evennia.create_object
+function. You can do the same thing yourself in code:
from evennia import create_object
new_rose = create_object("typeclasses.flowers.Rose", key="MyRose")
(The @create command will auto-append the most likely path to your typeclass, if you enter the
+
(The create command will auto-append the most likely path to your typeclass, if you enter the
call manually you have to give the full path to the class. The create.create_object function is
powerful and should be used for all coded object creating (so this is what you use when defining
-your own building commands). Check out the ev.create_* functions for how to build other entities
-like Scripts).
This particular Rose class doesn’t really do much, all it does it make sure the attribute
desc(which is what the look command looks for) is pre-set, which is pretty pointless since you
-will usually want to change this at build time (using the @desc command or using the
-Spawner). The Object typeclass offers many more hooks that is available
-to use though - see next section.
desc command or using the
+Spawner).
+Object, Character, Room and Exit also inherit from mygame.typeclasses.objects.ObjectParent.
+This is an empty ‘mixin’ class. Optionally, you can modify this class if you want to easily add some common functionality to all
+your Objects, Characters, Rooms and Exits at once. You can still customize each subclass separately (see the Python
+docs on multiple inheritance for details).
For example:
+# in mygame/typeclasses/objects.py
+# ...
+
+from evennia.objects.objects import DefaultObject
+
+class ObjectParent:
+ def at_pre_get(self, getter, **kwargs):
+ # make all entities by default un-pickable
+ return False
+
+class Object(ObjectParent, DefaultObject):
+ # replaces at_pre_get with its own
+ def at_pre_get(self, getter, **kwargs):
+ return True
+
+# each in their respective modules ...
+
+class Character(ObjectParent, DefaultCharacter):
+ # will inherit at_pre_get from ObjectParent
+ pass
+
+class Exit(ObjectParent, DefaultExit):
+ # Overrides and uses the DefaultExit version of at_pre_get instead
+ def at_pre_get(self, getter, **kwargs):
+ return DefaultExit.at_pre_get(self, getter, **kwargs)
+
+ObjectBy default the CooldownHandler will use the cooldowns property, but you can
customize this if desired by passing a different value for the db_attribute
parameter.
-from evennia.contrib.game_systems.cooldowns import Cooldownhandler
+from evennia.contrib.game_systems.cooldowns import CooldownHandler
from evennia.utils.utils import lazy_property
@lazy_property
diff --git a/docs/1.0-dev/_modules/django/conf.html b/docs/1.0-dev/_modules/django/conf.html
index ff0e182916..f641f02a1d 100644
--- a/docs/1.0-dev/_modules/django/conf.html
+++ b/docs/1.0-dev/_modules/django/conf.html
@@ -64,15 +64,15 @@
# RemovedInDjango50Warning
USE_DEPRECATED_PYTZ_DEPRECATED_MSG = (
- 'The USE_DEPRECATED_PYTZ setting, and support for pytz timezones is '
- 'deprecated in favor of the stdlib zoneinfo module. Please update your '
- 'code to use zoneinfo and remove the USE_DEPRECATED_PYTZ setting.'
+ "The USE_DEPRECATED_PYTZ setting, and support for pytz timezones is "
+ "deprecated in favor of the stdlib zoneinfo module. Please update your "
+ "code to use zoneinfo and remove the USE_DEPRECATED_PYTZ setting."
)
USE_L10N_DEPRECATED_MSG = (
- 'The USE_L10N setting is deprecated. Starting with Django 5.0, localized '
- 'formatting of data will always be enabled. For example Django will '
- 'display numbers and dates using the format of the current locale.'
+ "The USE_L10N setting is deprecated. Starting with Django 5.0, localized "
+ "formatting of data will always be enabled. For example Django will "
+ "display numbers and dates using the format of the current locale."
)
@@ -81,6 +81,7 @@
String subclass which references a current settings value. It's treated as
the value in memory but serializes to a settings.NAME attribute reference.
"""
+
def __new__(self, value, setting_name):
return str.__new__(self, value)
@@ -94,6 +95,7 @@
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
+
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
@@ -107,16 +109,17 @@
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
- % (desc, ENVIRONMENT_VARIABLE))
+ % (desc, ENVIRONMENT_VARIABLE)
+ )
self._wrapped = Settings(settings_module)
def __repr__(self):
# Hardcode the class name as otherwise it yields 'Settings'.
if self._wrapped is empty:
- return '<LazySettings [Unevaluated]>'
+ return "<LazySettings [Unevaluated]>"
return '<LazySettings "%(settings_module)s">' % {
- 'settings_module': self._wrapped.SETTINGS_MODULE,
+ "settings_module": self._wrapped.SETTINGS_MODULE,
}
def __getattr__(self, name):
@@ -127,9 +130,9 @@
# Special case some settings which require further modification.
# This is done here for performance reasons so the modified value is cached.
- if name in {'MEDIA_URL', 'STATIC_URL'} and val is not None:
+ if name in {"MEDIA_URL", "STATIC_URL"} and val is not None:
val = self._add_script_prefix(val)
- elif name == 'SECRET_KEY' and not val:
+ elif name == "SECRET_KEY" and not val:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
self.__dict__[name] = val
@@ -140,7 +143,7 @@
Set the value of setting. Clear all cached values if _wrapped changes
(@override_settings does this) or clear single values when set.
"""
- if name == '_wrapped':
+ if name == "_wrapped":
self.__dict__.clear()
else:
self.__dict__.pop(name, None)
@@ -158,11 +161,11 @@
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
- raise RuntimeError('Settings already configured.')
+ raise RuntimeError("Settings already configured.")
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
if not name.isupper():
- raise TypeError('Setting %r must be uppercase.' % name)
+ raise TypeError("Setting %r must be uppercase." % name)
setattr(holder, name, value)
self._wrapped = holder
@@ -175,10 +178,11 @@
subpath to STATIC_URL and MEDIA_URL in settings is inconvenient.
"""
# Don't apply prefix to absolute paths and URLs.
- if value.startswith(('http://', 'https://', '/')):
+ if value.startswith(("http://", "https://", "/")):
return value
from django.urls import get_script_prefix
- return '%s%s' % (get_script_prefix(), value)
+
+ return "%s%s" % (get_script_prefix(), value)
@property
def configured(self):
@@ -197,14 +201,14 @@
RemovedInDjango50Warning,
stacklevel=2,
)
- return self.__getattr__('USE_L10N')
+ return self.__getattr__("USE_L10N")
# RemovedInDjango50Warning.
@property
def _USE_L10N_INTERNAL(self):
# Special hook to avoid checking a traceback in internal use on hot
# paths.
- return self.__getattr__('USE_L10N')
+ return self.__getattr__("USE_L10N")
class Settings:
@@ -220,7 +224,7 @@
mod = importlib.import_module(self.SETTINGS_MODULE)
tuple_settings = (
- 'ALLOWED_HOSTS',
+ "ALLOWED_HOSTS",
"INSTALLED_APPS",
"TEMPLATE_DIRS",
"LOCALE_PATHS",
@@ -230,36 +234,39 @@
if setting.isupper():
setting_value = getattr(mod, setting)
- if (setting in tuple_settings and
- not isinstance(setting_value, (list, tuple))):
- raise ImproperlyConfigured("The %s setting must be a list or a tuple." % setting)
+ if setting in tuple_settings and not isinstance(
+ setting_value, (list, tuple)
+ ):
+ raise ImproperlyConfigured(
+ "The %s setting must be a list or a tuple." % setting
+ )
setattr(self, setting, setting_value)
self._explicit_settings.add(setting)
- if self.USE_TZ is False and not self.is_overridden('USE_TZ'):
+ if self.USE_TZ is False and not self.is_overridden("USE_TZ"):
warnings.warn(
- 'The default value of USE_TZ will change from False to True '
- 'in Django 5.0. Set USE_TZ to False in your project settings '
- 'if you want to keep the current default behavior.',
+ "The default value of USE_TZ will change from False to True "
+ "in Django 5.0. Set USE_TZ to False in your project settings "
+ "if you want to keep the current default behavior.",
category=RemovedInDjango50Warning,
)
- if self.is_overridden('USE_DEPRECATED_PYTZ'):
+ if self.is_overridden("USE_DEPRECATED_PYTZ"):
warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
- if hasattr(time, 'tzset') and self.TIME_ZONE:
+ if hasattr(time, "tzset") and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
- zoneinfo_root = Path('/usr/share/zoneinfo')
- zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split('/'))
+ zoneinfo_root = Path("/usr/share/zoneinfo")
+ zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split("/"))
if zoneinfo_root.exists() and not zone_info_file.exists():
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
- os.environ['TZ'] = self.TIME_ZONE
+ os.environ["TZ"] = self.TIME_ZONE
time.tzset()
- if self.is_overridden('USE_L10N'):
+ if self.is_overridden("USE_L10N"):
warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
def is_overridden(self, setting):
@@ -267,13 +274,14 @@
def __repr__(self):
return '<%(cls)s "%(settings_module)s">' % {
- 'cls': self.__class__.__name__,
- 'settings_module': self.SETTINGS_MODULE,
+ "cls": self.__class__.__name__,
+ "settings_module": self.SETTINGS_MODULE,
}
class UserSettingsHolder:
"""Holder for user configured settings."""
+
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
@@ -283,7 +291,7 @@
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
- self.__dict__['_deleted'] = set()
+ self.__dict__["_deleted"] = set()
self.default_settings = default_settings
def __getattr__(self, name):
@@ -293,10 +301,10 @@
def __setattr__(self, name, value):
self._deleted.discard(name)
- if name == 'USE_L10N':
+ if name == "USE_L10N":
warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
super().__setattr__(name, value)
- if name == 'USE_DEPRECATED_PYTZ':
+ if name == "USE_DEPRECATED_PYTZ":
warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
def __delattr__(self, name):
@@ -306,19 +314,22 @@
def __dir__(self):
return sorted(
- s for s in [*self.__dict__, *dir(self.default_settings)]
+ s
+ for s in [*self.__dict__, *dir(self.default_settings)]
if s not in self._deleted
)
def is_overridden(self, setting):
- deleted = (setting in self._deleted)
- set_locally = (setting in self.__dict__)
- set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
+ deleted = setting in self._deleted
+ set_locally = setting in self.__dict__
+ set_on_default = getattr(
+ self.default_settings, "is_overridden", lambda s: False
+ )(setting)
return deleted or set_locally or set_on_default
def __repr__(self):
- return '<%(cls)s>' % {
- 'cls': self.__class__.__name__,
+ return "<%(cls)s>" % {
+ "cls": self.__class__.__name__,
}
diff --git a/docs/1.0-dev/_modules/django/db/models/fields/related_descriptors.html b/docs/1.0-dev/_modules/django/db/models/fields/related_descriptors.html
index 5a1f588f52..fd3e82959d 100644
--- a/docs/1.0-dev/_modules/django/db/models/fields/related_descriptors.html
+++ b/docs/1.0-dev/_modules/django/db/models/fields/related_descriptors.html
@@ -115,7 +115,9 @@
class ForeignKeyDeferredAttribute(DeferredAttribute):
def __set__(self, instance, value):
- if instance.__dict__.get(self.field.attname) != value and self.field.is_cached(instance):
+ if instance.__dict__.get(self.field.attname) != value and self.field.is_cached(
+ instance
+ ):
self.field.delete_cached_value(instance)
instance.__dict__[self.field.attname] = value
@@ -142,14 +144,16 @@
# related model might not be resolved yet; `self.field.model` might
# still be a string model reference.
return type(
- 'RelatedObjectDoesNotExist',
- (self.field.remote_field.model.DoesNotExist, AttributeError), {
- '__module__': self.field.model.__module__,
- '__qualname__': '%s.%s.RelatedObjectDoesNotExist' % (
+ "RelatedObjectDoesNotExist",
+ (self.field.remote_field.model.DoesNotExist, AttributeError),
+ {
+ "__module__": self.field.model.__module__,
+ "__qualname__": "%s.%s.RelatedObjectDoesNotExist"
+ % (
self.field.model.__qualname__,
self.field.name,
),
- }
+ },
)
def is_cached(self, instance):
@@ -176,9 +180,12 @@
# The check for len(...) == 1 is a special case that allows the query
# to be join-less and smaller. Refs #21760.
if remote_field.is_hidden() or len(self.field.foreign_related_fields) == 1:
- query = {'%s__in' % related_field.name: {instance_attr(inst)[0] for inst in instances}}
+ query = {
+ "%s__in"
+ % related_field.name: {instance_attr(inst)[0] for inst in instances}
+ }
else:
- query = {'%s__in' % self.field.related_query_name(): instances}
+ query = {"%s__in" % self.field.related_query_name(): instances}
queryset = queryset.filter(**query)
# Since we're going to assign directly in the cache,
@@ -187,7 +194,14 @@
for rel_obj in queryset:
instance = instances_dict[rel_obj_attr(rel_obj)]
remote_field.set_cached_value(rel_obj, instance)
- return queryset, rel_obj_attr, instance_attr, True, self.field.get_cache_name(), False
+ return (
+ queryset,
+ rel_obj_attr,
+ instance_attr,
+ True,
+ self.field.get_cache_name(),
+ False,
+ )
def get_object(self, instance):
qs = self.get_queryset(instance=instance)
@@ -214,7 +228,11 @@
rel_obj = self.field.get_cached_value(instance)
except KeyError:
has_value = None not in self.field.get_local_related_value(instance)
- ancestor_link = instance._meta.get_ancestor_link(self.field.model) if has_value else None
+ ancestor_link = (
+ instance._meta.get_ancestor_link(self.field.model)
+ if has_value
+ else None
+ )
if ancestor_link and ancestor_link.is_cached(instance):
# An ancestor link will exist if this field is defined on a
# multi-table inheritance parent of the instance's class.
@@ -252,9 +270,12 @@
- ``value`` is the ``parent`` instance on the right of the equal sign
"""
# An object must be an instance of the related class.
- if value is not None and not isinstance(value, self.field.remote_field.model._meta.concrete_model):
+ if value is not None and not isinstance(
+ value, self.field.remote_field.model._meta.concrete_model
+ ):
raise ValueError(
- 'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
+ 'Cannot assign "%r": "%s.%s" must be a "%s" instance.'
+ % (
value,
instance._meta.object_name,
self.field.name,
@@ -263,11 +284,18 @@
)
elif value is not None:
if instance._state.db is None:
- instance._state.db = router.db_for_write(instance.__class__, instance=value)
+ instance._state.db = router.db_for_write(
+ instance.__class__, instance=value
+ )
if value._state.db is None:
- value._state.db = router.db_for_write(value.__class__, instance=instance)
+ value._state.db = router.db_for_write(
+ value.__class__, instance=instance
+ )
if not router.allow_relation(value, instance):
- raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)
+ raise ValueError(
+ 'Cannot assign "%r": the current database router prevents this '
+ "relation." % value
+ )
remote_field = self.field.remote_field
# If we're setting the value of a OneToOneField to None, we need to clear
@@ -355,12 +383,15 @@
opts = instance._meta
# Inherited primary key fields from this object's base classes.
inherited_pk_fields = [
- field for field in opts.concrete_fields
+ field
+ for field in opts.concrete_fields
if field.primary_key and field.remote_field
]
for field in inherited_pk_fields:
rel_model_pk_name = field.remote_field.model._meta.pk.attname
- raw_value = getattr(value, rel_model_pk_name) if value is not None else None
+ raw_value = (
+ getattr(value, rel_model_pk_name) if value is not None else None
+ )
setattr(instance, rel_model_pk_name, raw_value)
@@ -387,13 +418,15 @@
# The exception isn't created at initialization time for the sake of
# consistency with `ForwardManyToOneDescriptor`.
return type(
- 'RelatedObjectDoesNotExist',
- (self.related.related_model.DoesNotExist, AttributeError), {
- '__module__': self.related.model.__module__,
- '__qualname__': '%s.%s.RelatedObjectDoesNotExist' % (
+ "RelatedObjectDoesNotExist",
+ (self.related.related_model.DoesNotExist, AttributeError),
+ {
+ "__module__": self.related.model.__module__,
+ "__qualname__": "%s.%s.RelatedObjectDoesNotExist"
+ % (
self.related.model.__qualname__,
self.related.name,
- )
+ ),
},
)
@@ -411,7 +444,7 @@
rel_obj_attr = self.related.field.get_local_related_value
instance_attr = self.related.field.get_foreign_related_value
instances_dict = {instance_attr(inst): inst for inst in instances}
- query = {'%s__in' % self.related.field.name: instances}
+ query = {"%s__in" % self.related.field.name: instances}
queryset = queryset.filter(**query)
# Since we're going to assign directly in the cache,
@@ -419,7 +452,14 @@
for rel_obj in queryset:
instance = instances_dict[rel_obj_attr(rel_obj)]
self.related.field.set_cached_value(rel_obj, instance)
- return queryset, rel_obj_attr, instance_attr, True, self.related.get_cache_name(), False
+ return (
+ queryset,
+ rel_obj_attr,
+ instance_attr,
+ True,
+ self.related.get_cache_name(),
+ False,
+ )
def __get__(self, instance, cls=None):
"""
@@ -460,10 +500,8 @@
if rel_obj is None:
raise self.RelatedObjectDoesNotExist(
- "%s has no %s." % (
- instance.__class__.__name__,
- self.related.get_accessor_name()
- )
+ "%s has no %s."
+ % (instance.__class__.__name__, self.related.get_accessor_name())
)
else:
return rel_obj
@@ -499,7 +537,8 @@
elif not isinstance(value, self.related.related_model):
# An object must be an instance of the related class.
raise ValueError(
- 'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
+ 'Cannot assign "%r": "%s.%s" must be a "%s" instance.'
+ % (
value,
instance._meta.object_name,
self.related.get_accessor_name(),
@@ -508,14 +547,25 @@
)
else:
if instance._state.db is None:
- instance._state.db = router.db_for_write(instance.__class__, instance=value)
+ instance._state.db = router.db_for_write(
+ instance.__class__, instance=value
+ )
if value._state.db is None:
- value._state.db = router.db_for_write(value.__class__, instance=instance)
+ value._state.db = router.db_for_write(
+ value.__class__, instance=instance
+ )
if not router.allow_relation(value, instance):
- raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)
+ raise ValueError(
+ 'Cannot assign "%r": the current database router prevents this '
+ "relation." % value
+ )
- related_pk = tuple(getattr(instance, field.attname) for field in self.related.field.foreign_related_fields)
- # Set the value of the related field to the value of the related object's related field
+ related_pk = tuple(
+ getattr(instance, field.attname)
+ for field in self.related.field.foreign_related_fields
+ )
+ # Set the value of the related field to the value of the related
+ # object's related field.
for index, field in enumerate(self.related.field.local_related_fields):
setattr(value, field.attname, related_pk[index])
@@ -578,13 +628,13 @@
def _get_set_deprecation_msg_params(self):
return (
- 'reverse side of a related set',
+ "reverse side of a related set",
self.rel.get_accessor_name(),
)
def __set__(self, instance, value):
raise TypeError(
- 'Direct assignment to the %s is prohibited. Use %s.set() instead.'
+ "Direct assignment to the %s is prohibited. Use %s.set() instead."
% self._get_set_deprecation_msg_params(),
)
@@ -611,6 +661,7 @@
manager = getattr(self.model, manager)
manager_class = create_reverse_many_to_one_manager(manager.__class__, rel)
return manager_class(self.instance)
+
do_not_call_in_templates = True
def _apply_rel_filters(self, queryset):
@@ -618,7 +669,9 @@
Filter the queryset for the instance this manager is bound to.
"""
db = self._db or router.db_for_read(self.model, instance=self.instance)
- empty_strings_as_null = connections[db].features.interprets_empty_strings_as_nulls
+ empty_strings_as_null = connections[
+ db
+ ].features.interprets_empty_strings_as_nulls
queryset._add_hints(instance=self.instance)
if self._db:
queryset = queryset.using(self._db)
@@ -626,7 +679,7 @@
queryset = queryset.filter(**self.core_filters)
for field in self.field.foreign_related_fields:
val = getattr(self.instance, field.attname)
- if val is None or (val == '' and empty_strings_as_null):
+ if val is None or (val == "" and empty_strings_as_null):
return queryset.none()
if self.field.many_to_one:
# Guard against field-like objects such as GenericRelation
@@ -638,24 +691,34 @@
except FieldError:
# The relationship has multiple target fields. Use a tuple
# for related object id.
- rel_obj_id = tuple([
- getattr(self.instance, target_field.attname)
- for target_field in self.field.get_path_info()[-1].target_fields
- ])
+ rel_obj_id = tuple(
+ [
+ getattr(self.instance, target_field.attname)
+ for target_field in self.field.get_path_info()[
+ -1
+ ].target_fields
+ ]
+ )
else:
rel_obj_id = getattr(self.instance, target_field.attname)
- queryset._known_related_objects = {self.field: {rel_obj_id: self.instance}}
+ queryset._known_related_objects = {
+ self.field: {rel_obj_id: self.instance}
+ }
return queryset
def _remove_prefetched_objects(self):
try:
- self.instance._prefetched_objects_cache.pop(self.field.remote_field.get_cache_name())
+ self.instance._prefetched_objects_cache.pop(
+ self.field.remote_field.get_cache_name()
+ )
except (AttributeError, KeyError):
pass # nothing to clear from cache
def get_queryset(self):
try:
- return self.instance._prefetched_objects_cache[self.field.remote_field.get_cache_name()]
+ return self.instance._prefetched_objects_cache[
+ self.field.remote_field.get_cache_name()
+ ]
except (AttributeError, KeyError):
queryset = super().get_queryset()
return self._apply_rel_filters(queryset)
@@ -670,7 +733,7 @@
rel_obj_attr = self.field.get_local_related_value
instance_attr = self.field.get_foreign_related_value
instances_dict = {instance_attr(inst): inst for inst in instances}
- query = {'%s__in' % self.field.name: instances}
+ query = {"%s__in" % self.field.name: instances}
queryset = queryset.filter(**query)
# Since we just bypassed this class' get_queryset(), we must manage
@@ -687,9 +750,13 @@
def check_and_update_obj(obj):
if not isinstance(obj, self.model):
- raise TypeError("'%s' instance expected, got %r" % (
- self.model._meta.object_name, obj,
- ))
+ raise TypeError(
+ "'%s' instance expected, got %r"
+ % (
+ self.model._meta.object_name,
+ obj,
+ )
+ )
setattr(obj, self.field.name, self.instance)
if bulk:
@@ -702,36 +769,44 @@
"the object first." % obj
)
pks.append(obj.pk)
- self.model._base_manager.using(db).filter(pk__in=pks).update(**{
- self.field.name: self.instance,
- })
+ self.model._base_manager.using(db).filter(pk__in=pks).update(
+ **{
+ self.field.name: self.instance,
+ }
+ )
else:
with transaction.atomic(using=db, savepoint=False):
for obj in objs:
check_and_update_obj(obj)
obj.save()
+
add.alters_data = True
def create(self, **kwargs):
kwargs[self.field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
+
create.alters_data = True
def get_or_create(self, **kwargs):
kwargs[self.field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
+
get_or_create.alters_data = True
def update_or_create(self, **kwargs):
kwargs[self.field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).update_or_create(**kwargs)
+
update_or_create.alters_data = True
- # remove() and clear() are only provided if the ForeignKey can have a value of null.
+ # remove() and clear() are only provided if the ForeignKey can have a
+ # value of null.
if rel.field.null:
+
def remove(self, *objs, bulk=True):
if not objs:
return
@@ -739,9 +814,13 @@
old_ids = set()
for obj in objs:
if not isinstance(obj, self.model):
- raise TypeError("'%s' instance expected, got %r" % (
- self.model._meta.object_name, obj,
- ))
+ raise TypeError(
+ "'%s' instance expected, got %r"
+ % (
+ self.model._meta.object_name,
+ obj,
+ )
+ )
# Is obj actually part of this descriptor set?
if self.field.get_local_related_value(obj) == val:
old_ids.add(obj.pk)
@@ -750,10 +829,12 @@
"%r is not related to %r." % (obj, self.instance)
)
self._clear(self.filter(pk__in=old_ids), bulk)
+
remove.alters_data = True
def clear(self, *, bulk=True):
self._clear(self, bulk)
+
clear.alters_data = True
def _clear(self, queryset, bulk):
@@ -768,6 +849,7 @@
for obj in queryset:
setattr(obj, self.field.name, None)
obj.save(update_fields=[self.field.name])
+
_clear.alters_data = True
def set(self, objs, *, bulk=True, clear=False):
@@ -794,6 +876,7 @@
self.add(*new_objs, bulk=bulk)
else:
self.add(*objs, bulk=bulk)
+
set.alters_data = True
return RelatedManager
@@ -840,7 +923,8 @@
def _get_set_deprecation_msg_params(self):
return (
- '%s side of a many-to-many set' % ('reverse' if self.reverse else 'forward'),
+ "%s side of a many-to-many set"
+ % ("reverse" if self.reverse else "forward"),
self.rel.get_accessor_name() if self.reverse else self.field.name,
)
@@ -883,41 +967,51 @@
self.core_filters = {}
self.pk_field_names = {}
for lh_field, rh_field in self.source_field.related_fields:
- core_filter_key = '%s__%s' % (self.query_field_name, rh_field.name)
+ core_filter_key = "%s__%s" % (self.query_field_name, rh_field.name)
self.core_filters[core_filter_key] = getattr(instance, rh_field.attname)
self.pk_field_names[lh_field.name] = rh_field.name
self.related_val = self.source_field.get_foreign_related_value(instance)
if None in self.related_val:
- raise ValueError('"%r" needs to have a value for field "%s" before '
- 'this many-to-many relationship can be used.' %
- (instance, self.pk_field_names[self.source_field_name]))
+ raise ValueError(
+ '"%r" needs to have a value for field "%s" before '
+ "this many-to-many relationship can be used."
+ % (instance, self.pk_field_names[self.source_field_name])
+ )
# Even if this relation is not to pk, we require still pk value.
# The wish is that the instance has been already saved to DB,
# although having a pk value isn't a guarantee of that.
if instance.pk is None:
- raise ValueError("%r instance needs to have a primary key value before "
- "a many-to-many relationship can be used." %
- instance.__class__.__name__)
+ raise ValueError(
+ "%r instance needs to have a primary key value before "
+ "a many-to-many relationship can be used."
+ % instance.__class__.__name__
+ )
def __call__(self, *, manager):
manager = getattr(self.model, manager)
- manager_class = create_forward_many_to_many_manager(manager.__class__, rel, reverse)
+ manager_class = create_forward_many_to_many_manager(
+ manager.__class__, rel, reverse
+ )
return manager_class(instance=self.instance)
+
do_not_call_in_templates = True
def _build_remove_filters(self, removed_vals):
filters = Q((self.source_field_name, self.related_val))
# No need to add a subquery condition if removed_vals is a QuerySet without
# filters.
- removed_vals_filters = (not isinstance(removed_vals, QuerySet) or
- removed_vals._has_filters())
+ removed_vals_filters = (
+ not isinstance(removed_vals, QuerySet) or removed_vals._has_filters()
+ )
if removed_vals_filters:
- filters &= Q((f'{self.target_field_name}__in', removed_vals))
+ filters &= Q((f"{self.target_field_name}__in", removed_vals))
if self.symmetrical:
symmetrical_filters = Q((self.target_field_name, self.related_val))
if removed_vals_filters:
- symmetrical_filters &= Q((f'{self.source_field_name}__in', removed_vals))
+ symmetrical_filters &= Q(
+ (f"{self.source_field_name}__in", removed_vals)
+ )
filters |= symmetrical_filters
return filters
@@ -951,7 +1045,7 @@
queryset._add_hints(instance=instances[0])
queryset = queryset.using(queryset._db or self._db)
- query = {'%s__in' % self.query_field_name: instances}
+ query = {"%s__in" % self.query_field_name: instances}
queryset = queryset._next_is_sticky().filter(**query)
# M2M: need to annotate the query in order to get the primary model
@@ -965,13 +1059,18 @@
join_table = fk.model._meta.db_table
connection = connections[queryset.db]
qn = connection.ops.quote_name
- queryset = queryset.extra(select={
- '_prefetch_related_val_%s' % f.attname:
- '%s.%s' % (qn(join_table), qn(f.column)) for f in fk.local_related_fields})
+ queryset = queryset.extra(
+ select={
+ "_prefetch_related_val_%s"
+ % f.attname: "%s.%s"
+ % (qn(join_table), qn(f.column))
+ for f in fk.local_related_fields
+ }
+ )
return (
queryset,
lambda result: tuple(
- getattr(result, '_prefetch_related_val_%s' % f.attname)
+ getattr(result, "_prefetch_related_val_%s" % f.attname)
for f in fk.local_related_fields
),
lambda inst: tuple(
@@ -988,7 +1087,9 @@
db = router.db_for_write(self.through, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
self._add_items(
- self.source_field_name, self.target_field_name, *objs,
+ self.source_field_name,
+ self.target_field_name,
+ *objs,
through_defaults=through_defaults,
)
# If this is a symmetrical m2m relation to self, add the mirror
@@ -1000,30 +1101,41 @@
*objs,
through_defaults=through_defaults,
)
+
add.alters_data = True
def remove(self, *objs):
self._remove_prefetched_objects()
self._remove_items(self.source_field_name, self.target_field_name, *objs)
+
remove.alters_data = True
def clear(self):
db = router.db_for_write(self.through, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
signals.m2m_changed.send(
- sender=self.through, action="pre_clear",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=None, using=db,
+ sender=self.through,
+ action="pre_clear",
+ instance=self.instance,
+ reverse=self.reverse,
+ model=self.model,
+ pk_set=None,
+ using=db,
)
self._remove_prefetched_objects()
filters = self._build_remove_filters(super().get_queryset().using(db))
self.through._default_manager.using(db).filter(filters).delete()
signals.m2m_changed.send(
- sender=self.through, action="post_clear",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=None, using=db,
+ sender=self.through,
+ action="post_clear",
+ instance=self.instance,
+ reverse=self.reverse,
+ model=self.model,
+ pk_set=None,
+ using=db,
)
+
clear.alters_data = True
def set(self, objs, *, clear=False, through_defaults=None):
@@ -1037,7 +1149,11 @@
self.clear()
self.add(*objs, through_defaults=through_defaults)
else:
- old_ids = set(self.using(db).values_list(self.target_field.target_field.attname, flat=True))
+ old_ids = set(
+ self.using(db).values_list(
+ self.target_field.target_field.attname, flat=True
+ )
+ )
new_objs = []
for obj in objs:
@@ -1053,6 +1169,7 @@
self.remove(*old_ids)
self.add(*new_objs, through_defaults=through_defaults)
+
set.alters_data = True
def create(self, *, through_defaults=None, **kwargs):
@@ -1060,26 +1177,33 @@
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj, through_defaults=through_defaults)
return new_obj
+
create.alters_data = True
def get_or_create(self, *, through_defaults=None, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
- obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
+ obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create(
+ **kwargs
+ )
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj, through_defaults=through_defaults)
return obj, created
+
get_or_create.alters_data = True
def update_or_create(self, *, through_defaults=None, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
- obj, created = super(ManyRelatedManager, self.db_manager(db)).update_or_create(**kwargs)
+ obj, created = super(
+ ManyRelatedManager, self.db_manager(db)
+ ).update_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj, through_defaults=through_defaults)
return obj, created
+
update_or_create.alters_data = True
def _get_target_ids(self, target_field_name, objs):
@@ -1087,6 +1211,7 @@
Return the set of ids of `objs` that the target field references.
"""
from django.db.models import Model
+
target_ids = set()
target_field = self.through._meta.get_field(target_field_name)
for obj in objs:
@@ -1094,36 +1219,42 @@
if not router.allow_relation(obj, self.instance):
raise ValueError(
'Cannot add "%r": instance is on database "%s", '
- 'value is on database "%s"' %
- (obj, self.instance._state.db, obj._state.db)
+ 'value is on database "%s"'
+ % (obj, self.instance._state.db, obj._state.db)
)
target_id = target_field.get_foreign_related_value(obj)[0]
if target_id is None:
raise ValueError(
- 'Cannot add "%r": the value for field "%s" is None' %
- (obj, target_field_name)
+ 'Cannot add "%r": the value for field "%s" is None'
+ % (obj, target_field_name)
)
target_ids.add(target_id)
elif isinstance(obj, Model):
raise TypeError(
- "'%s' instance expected, got %r" %
- (self.model._meta.object_name, obj)
+ "'%s' instance expected, got %r"
+ % (self.model._meta.object_name, obj)
)
else:
target_ids.add(target_field.get_prep_value(obj))
return target_ids
- def _get_missing_target_ids(self, source_field_name, target_field_name, db, target_ids):
+ def _get_missing_target_ids(
+ self, source_field_name, target_field_name, db, target_ids
+ ):
"""
Return the subset of ids of `objs` that aren't already assigned to
this relationship.
"""
- vals = self.through._default_manager.using(db).values_list(
- target_field_name, flat=True
- ).filter(**{
- source_field_name: self.related_val[0],
- '%s__in' % target_field_name: target_ids,
- })
+ vals = (
+ self.through._default_manager.using(db)
+ .values_list(target_field_name, flat=True)
+ .filter(
+ **{
+ source_field_name: self.related_val[0],
+ "%s__in" % target_field_name: target_ids,
+ }
+ )
+ )
return target_ids.difference(vals)
def _get_add_plan(self, db, source_field_name):
@@ -1141,39 +1272,53 @@
# user-defined intermediary models as they could have other fields
# causing conflicts which must be surfaced.
can_ignore_conflicts = (
- connections[db].features.supports_ignore_conflicts and
- self.through._meta.auto_created is not False
+ connections[db].features.supports_ignore_conflicts
+ and self.through._meta.auto_created is not False
)
# Don't send the signal when inserting duplicate data row
# for symmetrical reverse entries.
- must_send_signals = (self.reverse or source_field_name == self.source_field_name) and (
- signals.m2m_changed.has_listeners(self.through)
- )
+ must_send_signals = (
+ self.reverse or source_field_name == self.source_field_name
+ ) and (signals.m2m_changed.has_listeners(self.through))
# Fast addition through bulk insertion can only be performed
# if no m2m_changed listeners are connected for self.through
# as they require the added set of ids to be provided via
# pk_set.
- return can_ignore_conflicts, must_send_signals, (can_ignore_conflicts and not must_send_signals)
+ return (
+ can_ignore_conflicts,
+ must_send_signals,
+ (can_ignore_conflicts and not must_send_signals),
+ )
- def _add_items(self, source_field_name, target_field_name, *objs, through_defaults=None):
+ def _add_items(
+ self, source_field_name, target_field_name, *objs, through_defaults=None
+ ):
# source_field_name: the PK fieldname in join table for the source object
# target_field_name: the PK fieldname in join table for the target object
- # *objs - objects to add. Either object instances, or primary keys of object instances.
+ # *objs - objects to add. Either object instances, or primary keys
+ # of object instances.
if not objs:
return
through_defaults = dict(resolve_callables(through_defaults or {}))
target_ids = self._get_target_ids(target_field_name, objs)
db = router.db_for_write(self.through, instance=self.instance)
- can_ignore_conflicts, must_send_signals, can_fast_add = self._get_add_plan(db, source_field_name)
+ can_ignore_conflicts, must_send_signals, can_fast_add = self._get_add_plan(
+ db, source_field_name
+ )
if can_fast_add:
- self.through._default_manager.using(db).bulk_create([
- self.through(**{
- '%s_id' % source_field_name: self.related_val[0],
- '%s_id' % target_field_name: target_id,
- })
- for target_id in target_ids
- ], ignore_conflicts=True)
+ self.through._default_manager.using(db).bulk_create(
+ [
+ self.through(
+ **{
+ "%s_id" % source_field_name: self.related_val[0],
+ "%s_id" % target_field_name: target_id,
+ }
+ )
+ for target_id in target_ids
+ ],
+ ignore_conflicts=True,
+ )
return
missing_target_ids = self._get_missing_target_ids(
@@ -1182,24 +1327,38 @@
with transaction.atomic(using=db, savepoint=False):
if must_send_signals:
signals.m2m_changed.send(
- sender=self.through, action='pre_add',
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=missing_target_ids, using=db,
+ sender=self.through,
+ action="pre_add",
+ instance=self.instance,
+ reverse=self.reverse,
+ model=self.model,
+ pk_set=missing_target_ids,
+ using=db,
)
# Add the ones that aren't there already.
- self.through._default_manager.using(db).bulk_create([
- self.through(**through_defaults, **{
- '%s_id' % source_field_name: self.related_val[0],
- '%s_id' % target_field_name: target_id,
- })
- for target_id in missing_target_ids
- ], ignore_conflicts=can_ignore_conflicts)
+ self.through._default_manager.using(db).bulk_create(
+ [
+ self.through(
+ **through_defaults,
+ **{
+ "%s_id" % source_field_name: self.related_val[0],
+ "%s_id" % target_field_name: target_id,
+ },
+ )
+ for target_id in missing_target_ids
+ ],
+ ignore_conflicts=can_ignore_conflicts,
+ )
if must_send_signals:
signals.m2m_changed.send(
- sender=self.through, action='post_add',
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=missing_target_ids, using=db,
+ sender=self.through,
+ action="post_add",
+ instance=self.instance,
+ reverse=self.reverse,
+ model=self.model,
+ pk_set=missing_target_ids,
+ using=db,
)
def _remove_items(self, source_field_name, target_field_name, *objs):
@@ -1223,23 +1382,32 @@
with transaction.atomic(using=db, savepoint=False):
# Send a signal to the other end if need be.
signals.m2m_changed.send(
- sender=self.through, action="pre_remove",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=old_ids, using=db,
+ sender=self.through,
+ action="pre_remove",
+ instance=self.instance,
+ reverse=self.reverse,
+ model=self.model,
+ pk_set=old_ids,
+ using=db,
)
target_model_qs = super().get_queryset()
if target_model_qs._has_filters():
- old_vals = target_model_qs.using(db).filter(**{
- '%s__in' % self.target_field.target_field.attname: old_ids})
+ old_vals = target_model_qs.using(db).filter(
+ **{"%s__in" % self.target_field.target_field.attname: old_ids}
+ )
else:
old_vals = old_ids
filters = self._build_remove_filters(old_vals)
self.through._default_manager.using(db).filter(filters).delete()
signals.m2m_changed.send(
- sender=self.through, action="post_remove",
- instance=self.instance, reverse=self.reverse,
- model=self.model, pk_set=old_ids, using=db,
+ sender=self.through,
+ action="post_remove",
+ instance=self.instance,
+ reverse=self.reverse,
+ model=self.model,
+ pk_set=old_ids,
+ using=db,
)
return ManyRelatedManager
diff --git a/docs/1.0-dev/_modules/django/db/models/manager.html b/docs/1.0-dev/_modules/django/db/models/manager.html
index 7e5b324f28..33d8ca81b1 100644
--- a/docs/1.0-dev/_modules/django/db/models/manager.html
+++ b/docs/1.0-dev/_modules/django/db/models/manager.html
@@ -74,7 +74,7 @@
def __str__(self):
"""Return "app_label.model_label.manager_name"."""
- return '%s.%s' % (self.model._meta.label, self.name)
+ return "%s.%s" % (self.model._meta.label, self.name)
def __class_getitem__(cls, *args, **kwargs):
return cls
@@ -87,12 +87,12 @@
Raise a ValueError if the manager is dynamically generated.
"""
qs_class = self._queryset_class
- if getattr(self, '_built_with_as_manager', False):
+ if getattr(self, "_built_with_as_manager", False):
# using MyQuerySet.as_manager()
return (
True, # as_manager
None, # manager_class
- '%s.%s' % (qs_class.__module__, qs_class.__name__), # qs_class
+ "%s.%s" % (qs_class.__module__, qs_class.__name__), # qs_class
None, # args
None, # kwargs
)
@@ -110,7 +110,7 @@
)
return (
False, # as_manager
- '%s.%s' % (module_name, name), # manager_class
+ "%s.%s" % (module_name, name), # manager_class
None, # qs_class
self._constructor_args[0], # args
self._constructor_args[1], # kwargs
@@ -124,18 +124,22 @@
def create_method(name, method):
def manager_method(self, *args, **kwargs):
return getattr(self.get_queryset(), name)(*args, **kwargs)
+
manager_method.__name__ = method.__name__
manager_method.__doc__ = method.__doc__
return manager_method
new_methods = {}
- for name, method in inspect.getmembers(queryset_class, predicate=inspect.isfunction):
+ for name, method in inspect.getmembers(
+ queryset_class, predicate=inspect.isfunction
+ ):
# Only copy missing methods.
if hasattr(cls, name):
continue
- # Only copy public methods or methods with the attribute `queryset_only=False`.
- queryset_only = getattr(method, 'queryset_only', None)
- if queryset_only or (queryset_only is None and name.startswith('_')):
+ # Only copy public methods or methods with the attribute
+ # queryset_only=False.
+ queryset_only = getattr(method, "queryset_only", None)
+ if queryset_only or (queryset_only is None and name.startswith("_")):
continue
# Copy the method onto the manager.
new_methods[name] = create_method(name, method)
@@ -144,11 +148,15 @@
@classmethod
def from_queryset(cls, queryset_class, class_name=None):
if class_name is None:
- class_name = '%sFrom%s' % (cls.__name__, queryset_class.__name__)
- return type(class_name, (cls,), {
- '_queryset_class': queryset_class,
- **cls._get_queryset_methods(queryset_class),
- })
+ class_name = "%sFrom%s" % (cls.__name__, queryset_class.__name__)
+ return type(
+ class_name,
+ (cls,),
+ {
+ "_queryset_class": queryset_class,
+ **cls._get_queryset_methods(queryset_class),
+ },
+ )
def contribute_to_class(self, cls, name):
self.name = self.name or name
@@ -198,8 +206,8 @@
def __eq__(self, other):
return (
- isinstance(other, self.__class__) and
- self._constructor_args == other._constructor_args
+ isinstance(other, self.__class__)
+ and self._constructor_args == other._constructor_args
)
def __hash__(self):
@@ -211,22 +219,24 @@
class ManagerDescriptor:
-
def __init__(self, manager):
self.manager = manager
def __get__(self, instance, cls=None):
if instance is not None:
- raise AttributeError("Manager isn't accessible via %s instances" % cls.__name__)
+ raise AttributeError(
+ "Manager isn't accessible via %s instances" % cls.__name__
+ )
if cls._meta.abstract:
- raise AttributeError("Manager isn't available; %s is abstract" % (
- cls._meta.object_name,
- ))
+ raise AttributeError(
+ "Manager isn't available; %s is abstract" % (cls._meta.object_name,)
+ )
if cls._meta.swapped:
raise AttributeError(
- "Manager isn't available; '%s' has been swapped for '%s'" % (
+ "Manager isn't available; '%s' has been swapped for '%s'"
+ % (
cls._meta.label,
cls._meta.swapped,
)
diff --git a/docs/1.0-dev/_modules/django/db/models/query.html b/docs/1.0-dev/_modules/django/db/models/query.html
index 586e804a49..b19eb7713f 100644
--- a/docs/1.0-dev/_modules/django/db/models/query.html
+++ b/docs/1.0-dev/_modules/django/db/models/query.html
@@ -52,8 +52,12 @@
from django.conf import settings
from django.core import exceptions
from django.db import (
- DJANGO_VERSION_PICKLE_KEY, IntegrityError, NotSupportedError, connections,
- router, transaction,
+ DJANGO_VERSION_PICKLE_KEY,
+ IntegrityError,
+ NotSupportedError,
+ connections,
+ router,
+ transaction,
)
from django.db.models import AutoField, DateField, DateTimeField, sql
from django.db.models.constants import LOOKUP_SEP
@@ -74,7 +78,9 @@
class BaseIterable:
- def __init__(self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE):
+ def __init__(
+ self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
+ ):
self.queryset = queryset
self.chunked_fetch = chunked_fetch
self.chunk_size = chunk_size
@@ -89,25 +95,40 @@
compiler = queryset.query.get_compiler(using=db)
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
- results = compiler.execute_sql(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
- select, klass_info, annotation_col_map = (compiler.select, compiler.klass_info,
- compiler.annotation_col_map)
- model_cls = klass_info['model']
- select_fields = klass_info['select_fields']
+ results = compiler.execute_sql(
+ chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
+ )
+ select, klass_info, annotation_col_map = (
+ compiler.select,
+ compiler.klass_info,
+ compiler.annotation_col_map,
+ )
+ model_cls = klass_info["model"]
+ select_fields = klass_info["select_fields"]
model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
- init_list = [f[0].target.attname
- for f in select[model_fields_start:model_fields_end]]
+ init_list = [
+ f[0].target.attname for f in select[model_fields_start:model_fields_end]
+ ]
related_populators = get_related_populators(klass_info, select, db)
known_related_objects = [
- (field, related_objs, operator.attrgetter(*[
- field.attname
- if from_field == 'self' else
- queryset.model._meta.get_field(from_field).attname
- for from_field in field.from_fields
- ])) for field, related_objs in queryset._known_related_objects.items()
+ (
+ field,
+ related_objs,
+ operator.attrgetter(
+ *[
+ field.attname
+ if from_field == "self"
+ else queryset.model._meta.get_field(from_field).attname
+ for from_field in field.from_fields
+ ]
+ ),
+ )
+ for field, related_objs in queryset._known_related_objects.items()
]
for row in compiler.results_iter(results):
- obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end])
+ obj = model_cls.from_db(
+ db, init_list, row[model_fields_start:model_fields_end]
+ )
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
@@ -147,7 +168,9 @@
*query.annotation_select,
]
indexes = range(len(names))
- for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size):
+ for row in compiler.results_iter(
+ chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
+ ):
yield {names[i]: row[i] for i in indexes}
@@ -169,16 +192,25 @@
*query.values_select,
*query.annotation_select,
]
- fields = [*queryset._fields, *(f for f in query.annotation_select if f not in queryset._fields)]
+ fields = [
+ *queryset._fields,
+ *(f for f in query.annotation_select if f not in queryset._fields),
+ ]
if fields != names:
# Reorder according to fields.
index_map = {name: idx for idx, name in enumerate(names)}
rowfactory = operator.itemgetter(*[index_map[f] for f in fields])
return map(
rowfactory,
- compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
+ compiler.results_iter(
+ chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
+ ),
)
- return compiler.results_iter(tuple_expected=True, chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
+ return compiler.results_iter(
+ tuple_expected=True,
+ chunked_fetch=self.chunked_fetch,
+ chunk_size=self.chunk_size,
+ )
class NamedValuesListIterable(ValuesListIterable):
@@ -193,7 +225,11 @@
names = queryset._fields
else:
query = queryset.query
- names = [*query.extra_select, *query.values_select, *query.annotation_select]
+ names = [
+ *query.extra_select,
+ *query.values_select,
+ *query.annotation_select,
+ ]
tuple_class = create_namedtuple_class(*names)
new = tuple.__new__
for row in super().__iter__():
@@ -209,7 +245,9 @@
def __iter__(self):
queryset = self.queryset
compiler = queryset.query.get_compiler(queryset.db)
- for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size):
+ for row in compiler.results_iter(
+ chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
+ ):
yield row[0]
@@ -249,9 +287,11 @@
def as_manager(cls):
# Address the circular dependency between `Queryset` and `Manager`.
from django.db.models.manager import Manager
+
manager = Manager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
+
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
@@ -263,7 +303,7 @@
"""Don't populate the QuerySet's cache."""
obj = self.__class__()
for k, v in self.__dict__.items():
- if k == '_result_cache':
+ if k == "_result_cache":
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
@@ -294,10 +334,10 @@
self.__dict__.update(state)
def __repr__(self):
- data = list(self[:REPR_OUTPUT_SIZE + 1])
+ data = list(self[: REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
- return '<%s %r>' % (self.__class__.__name__, data)
+ return "<%s %r>" % (self.__class__.__name__, data)
def __len__(self):
self._fetch_all()
@@ -329,17 +369,17 @@
"""Retrieve an item or slice from the set of results."""
if not isinstance(k, (int, slice)):
raise TypeError(
- 'QuerySet indices must be integers or slices, not %s.'
+ "QuerySet indices must be integers or slices, not %s."
% type(k).__name__
)
- if (
- (isinstance(k, int) and k < 0) or
- (isinstance(k, slice) and (
- (k.start is not None and k.start < 0) or
- (k.stop is not None and k.stop < 0)
- ))
+ if (isinstance(k, int) and k < 0) or (
+ isinstance(k, slice)
+ and (
+ (k.start is not None and k.start < 0)
+ or (k.stop is not None and k.stop < 0)
+ )
):
- raise ValueError('Negative indexing is not supported.')
+ raise ValueError("Negative indexing is not supported.")
if self._result_cache is not None:
return self._result_cache[k]
@@ -355,7 +395,7 @@
else:
stop = None
qs.query.set_limits(start, stop)
- return list(qs)[::k.step] if k.step else qs
+ return list(qs)[:: k.step] if k.step else qs
qs = self._chain()
qs.query.set_limits(k, k + 1)
@@ -382,11 +422,15 @@
return other
if isinstance(other, EmptyQuerySet):
return self
- query = self if self.query.can_filter() else self.model._base_manager.filter(pk__in=self.values('pk'))
+ query = (
+ self
+ if self.query.can_filter()
+ else self.model._base_manager.filter(pk__in=self.values("pk"))
+ )
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
- other = other.model._base_manager.filter(pk__in=other.values('pk'))
+ other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.OR)
return combined
@@ -395,7 +439,9 @@
####################################
def _iterator(self, use_chunked_fetch, chunk_size):
- yield from self._iterable_class(self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size)
+ yield from self._iterable_class(
+ self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size
+ )
def iterator(self, chunk_size=2000):
"""
@@ -403,8 +449,10 @@
database.
"""
if chunk_size <= 0:
- raise ValueError('Chunk size must be strictly positive.')
- use_chunked_fetch = not connections[self.db].settings_dict.get('DISABLE_SERVER_SIDE_CURSORS')
+ raise ValueError("Chunk size must be strictly positive.")
+ use_chunked_fetch = not connections[self.db].settings_dict.get(
+ "DISABLE_SERVER_SIDE_CURSORS"
+ )
return self._iterator(use_chunked_fetch, chunk_size)
def aggregate(self, *args, **kwargs):
@@ -417,7 +465,9 @@
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
- self._validate_values_are_expressions((*args, *kwargs.values()), method_name='aggregate')
+ self._validate_values_are_expressions(
+ (*args, *kwargs.values()), method_name="aggregate"
+ )
for arg in args:
# The default_alias property raises TypeError if default_alias
# can't be set automatically or AttributeError if it isn't an
@@ -435,7 +485,11 @@
if not annotation.contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
for expr in annotation.get_source_expressions():
- if expr.contains_aggregate and isinstance(expr, Ref) and expr.refs in kwargs:
+ if (
+ expr.contains_aggregate
+ and isinstance(expr, Ref)
+ and expr.refs in kwargs
+ ):
name = expr.refs
raise exceptions.FieldError(
"Cannot compute %s('%s'): '%s' is an aggregate"
@@ -463,14 +517,17 @@
"""
if self.query.combinator and (args or kwargs):
raise NotSupportedError(
- 'Calling QuerySet.get(...) with filters after %s() is not '
- 'supported.' % self.query.combinator
+ "Calling QuerySet.get(...) with filters after %s() is not "
+ "supported." % self.query.combinator
)
clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs)
if self.query.can_filter() and not self.query.distinct_fields:
clone = clone.order_by()
limit = None
- if not clone.query.select_for_update or connections[clone.db].features.supports_select_for_update_with_limit:
+ if (
+ not clone.query.select_for_update
+ or connections[clone.db].features.supports_select_for_update_with_limit
+ ):
limit = MAX_GET_RESULTS
clone.query.set_limits(high=limit)
num = len(clone)
@@ -478,13 +535,13 @@
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
- "%s matching query does not exist." %
- self.model._meta.object_name
+ "%s matching query does not exist." % self.model._meta.object_name
)
raise self.model.MultipleObjectsReturned(
- 'get() returned more than one %s -- it returned %s!' % (
+ "get() returned more than one %s -- it returned %s!"
+ % (
self.model._meta.object_name,
- num if not limit or num < limit else 'more than %s' % (limit - 1),
+ num if not limit or num < limit else "more than %s" % (limit - 1),
)
)
@@ -503,7 +560,7 @@
if obj.pk is None:
# Populate new PK values.
obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
- obj._prepare_related_fields_for_save(operation_name='bulk_create')
+ obj._prepare_related_fields_for_save(operation_name="bulk_create")
def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
"""
@@ -526,7 +583,7 @@
# Oracle as well, but the semantics for extracting the primary keys is
# trickier so it's not done yet.
if batch_size is not None and batch_size <= 0:
- raise ValueError('Batch size must be a positive integer.')
+ raise ValueError("Batch size must be a positive integer.")
# Check that the parents share the same concrete model with the our
# model to detect the inheritance pattern ConcreteGrandParent ->
# MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
@@ -546,7 +603,10 @@
objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
if objs_with_pk:
returned_columns = self._batched_insert(
- objs_with_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
+ objs_with_pk,
+ fields,
+ batch_size,
+ ignore_conflicts=ignore_conflicts,
)
for obj_with_pk, results in zip(objs_with_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
@@ -558,9 +618,15 @@
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
returned_columns = self._batched_insert(
- objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
+ objs_without_pk,
+ fields,
+ batch_size,
+ ignore_conflicts=ignore_conflicts,
)
- if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
+ if (
+ connection.features.can_return_rows_from_bulk_insert
+ and not ignore_conflicts
+ ):
assert len(returned_columns) == len(objs_without_pk)
for obj_without_pk, results in zip(objs_without_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
@@ -575,25 +641,27 @@
Update the given fields in each of the given objects in the database.
"""
if batch_size is not None and batch_size < 0:
- raise ValueError('Batch size must be a positive integer.')
+ raise ValueError("Batch size must be a positive integer.")
if not fields:
- raise ValueError('Field names must be given to bulk_update().')
+ raise ValueError("Field names must be given to bulk_update().")
objs = tuple(objs)
if any(obj.pk is None for obj in objs):
- raise ValueError('All bulk_update() objects must have a primary key set.')
+ raise ValueError("All bulk_update() objects must have a primary key set.")
fields = [self.model._meta.get_field(name) for name in fields]
if any(not f.concrete or f.many_to_many for f in fields):
- raise ValueError('bulk_update() can only be used with concrete fields.')
+ raise ValueError("bulk_update() can only be used with concrete fields.")
if any(f.primary_key for f in fields):
- raise ValueError('bulk_update() cannot be used with primary key fields.')
+ raise ValueError("bulk_update() cannot be used with primary key fields.")
if not objs:
return 0
# PK is used twice in the resulting update query, once in the filter
# and once in the WHEN. Each field will also have one CAST.
- max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
+ max_batch_size = connections[self.db].ops.bulk_batch_size(
+ ["pk", "pk"] + fields, objs
+ )
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
requires_casting = connections[self.db].features.requires_casted_case_in_updates
- batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
+ batches = (objs[i : i + batch_size] for i in range(0, len(objs), batch_size))
updates = []
for batch_objs in batches:
update_kwargs = {}
@@ -614,6 +682,7 @@
for pks, update_kwargs in updates:
rows_updated += self.filter(pk__in=pks).update(**update_kwargs)
return rows_updated
+
bulk_update.alters_data = True
def get_or_create(self, defaults=None, **kwargs):
@@ -680,10 +749,12 @@
invalid_params.append(param)
if invalid_params:
raise exceptions.FieldError(
- "Invalid field name(s) for model %s: '%s'." % (
+ "Invalid field name(s) for model %s: '%s'."
+ % (
self.model._meta.object_name,
"', '".join(sorted(invalid_params)),
- ))
+ )
+ )
return params
def _earliest(self, *fields):
@@ -694,7 +765,7 @@
if fields:
order_by = fields
else:
- order_by = getattr(self.model._meta, 'get_latest_by')
+ order_by = getattr(self.model._meta, "get_latest_by")
if order_by and not isinstance(order_by, (tuple, list)):
order_by = (order_by,)
if order_by is None:
@@ -710,25 +781,25 @@
def earliest(self, *fields):
if self.query.is_sliced:
- raise TypeError('Cannot change a query once a slice has been taken.')
+ raise TypeError("Cannot change a query once a slice has been taken.")
return self._earliest(*fields)
def latest(self, *fields):
if self.query.is_sliced:
- raise TypeError('Cannot change a query once a slice has been taken.')
+ raise TypeError("Cannot change a query once a slice has been taken.")
return self.reverse()._earliest(*fields)
def first(self):
"""Return the first object of a query or None if no match is found."""
- for obj in (self if self.ordered else self.order_by('pk'))[:1]:
+ for obj in (self if self.ordered else self.order_by("pk"))[:1]:
return obj
def last(self):
"""Return the last object of a query or None if no match is found."""
- for obj in (self.reverse() if self.ordered else self.order_by('-pk'))[:1]:
+ for obj in (self.reverse() if self.ordered else self.order_by("-pk"))[:1]:
return obj
- def in_bulk(self, id_list=None, *, field_name='pk'):
+ def in_bulk(self, id_list=None, *, field_name="pk"):
"""
Return a dictionary mapping each of the given IDs to the object with
that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
@@ -742,16 +813,19 @@
if len(constraint.fields) == 1
]
if (
- field_name != 'pk' and
- not opts.get_field(field_name).unique and
- field_name not in unique_fields and
- self.query.distinct_fields != (field_name,)
+ field_name != "pk"
+ and not opts.get_field(field_name).unique
+ and field_name not in unique_fields
+ and self.query.distinct_fields != (field_name,)
):
- raise ValueError("in_bulk()'s field_name must be a unique field but %r isn't." % field_name)
+ raise ValueError(
+ "in_bulk()'s field_name must be a unique field but %r isn't."
+ % field_name
+ )
if id_list is not None:
if not id_list:
return {}
- filter_key = '{}__in'.format(field_name)
+ filter_key = "{}__in".format(field_name)
batch_size = connections[self.db].features.max_query_params
id_list = tuple(id_list)
# If the database has a limit on the number of query parameters
@@ -759,7 +833,7 @@
if batch_size and batch_size < len(id_list):
qs = ()
for offset in range(0, len(id_list), batch_size):
- batch = id_list[offset:offset + batch_size]
+ batch = id_list[offset : offset + batch_size]
qs += tuple(self.filter(**{filter_key: batch}).order_by())
else:
qs = self.filter(**{filter_key: id_list}).order_by()
@@ -769,11 +843,11 @@
def delete(self):
"""Delete the records in the current QuerySet."""
- self._not_support_combined_queries('delete')
+ self._not_support_combined_queries("delete")
if self.query.is_sliced:
raise TypeError("Cannot use 'limit' or 'offset' with delete().")
if self.query.distinct or self.query.distinct_fields:
- raise TypeError('Cannot call delete() after .distinct().')
+ raise TypeError("Cannot call delete() after .distinct().")
if self._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
@@ -812,6 +886,7 @@
with cursor:
return cursor.rowcount
return 0
+
_raw_delete.alters_data = True
def update(self, **kwargs):
@@ -819,9 +894,9 @@
Update all elements in the current QuerySet, setting all the given
fields to the appropriate values.
"""
- self._not_support_combined_queries('update')
+ self._not_support_combined_queries("update")
if self.query.is_sliced:
- raise TypeError('Cannot update a query once a slice has been taken.')
+ raise TypeError("Cannot update a query once a slice has been taken.")
self._for_write = True
query = self.query.chain(sql.UpdateQuery)
query.add_update_values(kwargs)
@@ -831,6 +906,7 @@
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows
+
update.alters_data = True
def _update(self, values):
@@ -841,13 +917,14 @@
useful at that level).
"""
if self.query.is_sliced:
- raise TypeError('Cannot update a query once a slice has been taken.')
+ raise TypeError("Cannot update a query once a slice has been taken.")
query = self.query.chain(sql.UpdateQuery)
query.add_update_fields(values)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
self._result_cache = None
return query.get_compiler(self.db).execute_sql(CURSOR)
+
_update.alters_data = True
_update.queryset_only = False
@@ -858,11 +935,10 @@
def contains(self, obj):
"""Return True if the queryset contains an object."""
- self._not_support_combined_queries('contains')
+ self._not_support_combined_queries("contains")
if self._fields is not None:
raise TypeError(
- 'Cannot call QuerySet.contains() after .values() or '
- '.values_list().'
+ "Cannot call QuerySet.contains() after .values() or .values_list()."
)
try:
if obj._meta.concrete_model != self.model._meta.concrete_model:
@@ -870,9 +946,7 @@
except AttributeError:
raise TypeError("'obj' must be a model instance.")
if obj.pk is None:
- raise ValueError(
- 'QuerySet.contains() cannot be used on unsaved objects.'
- )
+ raise ValueError("QuerySet.contains() cannot be used on unsaved objects.")
if self._result_cache is not None:
return obj in self._result_cache
return self.filter(pk=obj.pk).exists()
@@ -892,7 +966,13 @@
def raw(self, raw_query, params=(), translations=None, using=None):
if using is None:
using = self.db
- qs = RawQuerySet(raw_query, model=self.model, params=params, translations=translations, using=using)
+ qs = RawQuerySet(
+ raw_query,
+ model=self.model,
+ params=params,
+ translations=translations,
+ using=using,
+ )
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
return qs
@@ -914,15 +994,20 @@
if flat and named:
raise TypeError("'flat' and 'named' can't be used together.")
if flat and len(fields) > 1:
- raise TypeError("'flat' is not valid when values_list is called with more than one field.")
+ raise TypeError(
+ "'flat' is not valid when values_list is called with more than one "
+ "field."
+ )
- field_names = {f for f in fields if not hasattr(f, 'resolve_expression')}
+ field_names = {f for f in fields if not hasattr(f, "resolve_expression")}
_fields = []
expressions = {}
counter = 1
for field in fields:
- if hasattr(field, 'resolve_expression'):
- field_id_prefix = getattr(field, 'default_alias', field.__class__.__name__.lower())
+ if hasattr(field, "resolve_expression"):
+ field_id_prefix = getattr(
+ field, "default_alias", field.__class__.__name__.lower()
+ )
while True:
field_id = field_id_prefix + str(counter)
counter += 1
@@ -935,59 +1020,71 @@
clone = self._values(*_fields, **expressions)
clone._iterable_class = (
- NamedValuesListIterable if named
- else FlatValuesListIterable if flat
+ NamedValuesListIterable
+ if named
+ else FlatValuesListIterable
+ if flat
else ValuesListIterable
)
return clone
- def dates(self, field_name, kind, order='ASC'):
+ def dates(self, field_name, kind, order="ASC"):
"""
Return a list of date objects representing all available dates for
the given field_name, scoped to 'kind'.
"""
- if kind not in ('year', 'month', 'week', 'day'):
+ if kind not in ("year", "month", "week", "day"):
raise ValueError("'kind' must be one of 'year', 'month', 'week', or 'day'.")
- if order not in ('ASC', 'DESC'):
+ if order not in ("ASC", "DESC"):
raise ValueError("'order' must be either 'ASC' or 'DESC'.")
- return self.annotate(
- datefield=Trunc(field_name, kind, output_field=DateField()),
- plain_field=F(field_name)
- ).values_list(
- 'datefield', flat=True
- ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datefield')
+ return (
+ self.annotate(
+ datefield=Trunc(field_name, kind, output_field=DateField()),
+ plain_field=F(field_name),
+ )
+ .values_list("datefield", flat=True)
+ .distinct()
+ .filter(plain_field__isnull=False)
+ .order_by(("-" if order == "DESC" else "") + "datefield")
+ )
# RemovedInDjango50Warning: when the deprecation ends, remove is_dst
# argument.
- def datetimes(self, field_name, kind, order='ASC', tzinfo=None, is_dst=timezone.NOT_PASSED):
+ def datetimes(
+ self, field_name, kind, order="ASC", tzinfo=None, is_dst=timezone.NOT_PASSED
+ ):
"""
Return a list of datetime objects representing all available
datetimes for the given field_name, scoped to 'kind'.
"""
- if kind not in ('year', 'month', 'week', 'day', 'hour', 'minute', 'second'):
+ if kind not in ("year", "month", "week", "day", "hour", "minute", "second"):
raise ValueError(
"'kind' must be one of 'year', 'month', 'week', 'day', "
"'hour', 'minute', or 'second'."
)
- if order not in ('ASC', 'DESC'):
+ if order not in ("ASC", "DESC"):
raise ValueError("'order' must be either 'ASC' or 'DESC'.")
if settings.USE_TZ:
if tzinfo is None:
tzinfo = timezone.get_current_timezone()
else:
tzinfo = None
- return self.annotate(
- datetimefield=Trunc(
- field_name,
- kind,
- output_field=DateTimeField(),
- tzinfo=tzinfo,
- is_dst=is_dst,
- ),
- plain_field=F(field_name)
- ).values_list(
- 'datetimefield', flat=True
- ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield')
+ return (
+ self.annotate(
+ datetimefield=Trunc(
+ field_name,
+ kind,
+ output_field=DateTimeField(),
+ tzinfo=tzinfo,
+ is_dst=is_dst,
+ ),
+ plain_field=F(field_name),
+ )
+ .values_list("datetimefield", flat=True)
+ .distinct()
+ .filter(plain_field__isnull=False)
+ .order_by(("-" if order == "DESC" else "") + "datetimefield")
+ )
def none(self):
"""Return an empty QuerySet."""
@@ -1011,7 +1108,7 @@
Return a new QuerySet instance with the args ANDed to the existing
set.
"""
- self._not_support_combined_queries('filter')
+ self._not_support_combined_queries("filter")
return self._filter_or_exclude(False, args, kwargs)
def exclude(self, *args, **kwargs):
@@ -1019,12 +1116,12 @@
Return a new QuerySet instance with NOT (args) ANDed to the existing
set.
"""
- self._not_support_combined_queries('exclude')
+ self._not_support_combined_queries("exclude")
return self._filter_or_exclude(True, args, kwargs)
def _filter_or_exclude(self, negate, args, kwargs):
if (args or kwargs) and self.query.is_sliced:
- raise TypeError('Cannot filter a query once a slice has been taken.')
+ raise TypeError("Cannot filter a query once a slice has been taken.")
clone = self._chain()
if self._defer_next_filter:
self._defer_next_filter = False
@@ -1062,7 +1159,9 @@
# Clear limits and ordering so they can be reapplied
clone.query.clear_ordering(force=True)
clone.query.clear_limits()
- clone.query.combined_queries = (self.query,) + tuple(qs.query for qs in other_qs)
+ clone.query.combined_queries = (self.query,) + tuple(
+ qs.query for qs in other_qs
+ )
clone.query.combinator = combinator
clone.query.combinator_all = all
return clone
@@ -1075,8 +1174,8 @@
return self
if len(qs) == 1:
return qs[0]
- return qs[0]._combinator_query('union', *qs[1:], all=all)
- return self._combinator_query('union', *other_qs, all=all)
+ return qs[0]._combinator_query("union", *qs[1:], all=all)
+ return self._combinator_query("union", *other_qs, all=all)
def intersection(self, *other_qs):
# If any query is an EmptyQuerySet, return it.
@@ -1085,13 +1184,13 @@
for other in other_qs:
if isinstance(other, EmptyQuerySet):
return other
- return self._combinator_query('intersection', *other_qs)
+ return self._combinator_query("intersection", *other_qs)
def difference(self, *other_qs):
# If the query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
- return self._combinator_query('difference', *other_qs)
+ return self._combinator_query("difference", *other_qs)
def select_for_update(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
@@ -1099,7 +1198,7 @@
FOR UPDATE lock.
"""
if nowait and skip_locked:
- raise ValueError('The nowait option cannot be used with skip_locked.')
+ raise ValueError("The nowait option cannot be used with skip_locked.")
obj = self._chain()
obj._for_write = True
obj.query.select_for_update = True
@@ -1118,9 +1217,11 @@
If select_related(None) is called, clear the list.
"""
- self._not_support_combined_queries('select_related')
+ self._not_support_combined_queries("select_related")
if self._fields is not None:
- raise TypeError("Cannot call select_related() after .values() or .values_list()")
+ raise TypeError(
+ "Cannot call select_related() after .values() or .values_list()"
+ )
obj = self._chain()
if fields == (None,):
@@ -1140,7 +1241,7 @@
When prefetch_related() is called more than once, append to the list of
prefetch lookups. If prefetch_related(None) is called, clear the list.
"""
- self._not_support_combined_queries('prefetch_related')
+ self._not_support_combined_queries("prefetch_related")
clone = self._chain()
if lookups == (None,):
clone._prefetch_related_lookups = ()
@@ -1150,7 +1251,9 @@
lookup = lookup.prefetch_to
lookup = lookup.split(LOOKUP_SEP, 1)[0]
if lookup in self.query._filtered_relations:
- raise ValueError('prefetch_related() is not supported with FilteredRelation.')
+ raise ValueError(
+ "prefetch_related() is not supported with FilteredRelation."
+ )
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
@@ -1159,26 +1262,29 @@
Return a query set in which the returned objects have been annotated
with extra data or aggregations.
"""
- self._not_support_combined_queries('annotate')
+ self._not_support_combined_queries("annotate")
return self._annotate(args, kwargs, select=True)
def alias(self, *args, **kwargs):
"""
Return a query set with added aliases for extra data or aggregations.
"""
- self._not_support_combined_queries('alias')
+ self._not_support_combined_queries("alias")
return self._annotate(args, kwargs, select=False)
def _annotate(self, args, kwargs, select=True):
- self._validate_values_are_expressions(args + tuple(kwargs.values()), method_name='annotate')
+ self._validate_values_are_expressions(
+ args + tuple(kwargs.values()), method_name="annotate"
+ )
annotations = {}
for arg in args:
# The default_alias property may raise a TypeError.
try:
if arg.default_alias in kwargs:
- raise ValueError("The named annotation '%s' conflicts with the "
- "default name for another annotation."
- % arg.default_alias)
+ raise ValueError(
+ "The named annotation '%s' conflicts with the "
+ "default name for another annotation." % arg.default_alias
+ )
except TypeError:
raise TypeError("Complex annotations require an alias")
annotations[arg.default_alias] = arg
@@ -1187,20 +1293,29 @@
clone = self._chain()
names = self._fields
if names is None:
- names = set(chain.from_iterable(
- (field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
- for field in self.model._meta.get_fields()
- ))
+ names = set(
+ chain.from_iterable(
+ (field.name, field.attname)
+ if hasattr(field, "attname")
+ else (field.name,)
+ for field in self.model._meta.get_fields()
+ )
+ )
for alias, annotation in annotations.items():
if alias in names:
- raise ValueError("The annotation '%s' conflicts with a field on "
- "the model." % alias)
+ raise ValueError(
+ "The annotation '%s' conflicts with a field on "
+ "the model." % alias
+ )
if isinstance(annotation, FilteredRelation):
clone.query.add_filtered_relation(annotation, alias)
else:
clone.query.add_annotation(
- annotation, alias, is_summary=False, select=select,
+ annotation,
+ alias,
+ is_summary=False,
+ select=select,
)
for alias, annotation in clone.query.annotations.items():
if alias in annotations and annotation.contains_aggregate:
@@ -1215,7 +1330,7 @@
def order_by(self, *field_names):
"""Return a new QuerySet instance with the ordering changed."""
if self.query.is_sliced:
- raise TypeError('Cannot reorder a query once a slice has been taken.')
+ raise TypeError("Cannot reorder a query once a slice has been taken.")
obj = self._chain()
obj.query.clear_ordering(force=True, clear_default=False)
obj.query.add_ordering(*field_names)
@@ -1225,19 +1340,28 @@
"""
Return a new QuerySet instance that will select only distinct results.
"""
- self._not_support_combined_queries('distinct')
+ self._not_support_combined_queries("distinct")
if self.query.is_sliced:
- raise TypeError('Cannot create distinct fields once a slice has been taken.')
+ raise TypeError(
+ "Cannot create distinct fields once a slice has been taken."
+ )
obj = self._chain()
obj.query.add_distinct_fields(*field_names)
return obj
- def extra(self, select=None, where=None, params=None, tables=None,
- order_by=None, select_params=None):
+ def extra(
+ self,
+ select=None,
+ where=None,
+ params=None,
+ tables=None,
+ order_by=None,
+ select_params=None,
+ ):
"""Add extra SQL fragments to the query."""
- self._not_support_combined_queries('extra')
+ self._not_support_combined_queries("extra")
if self.query.is_sliced:
- raise TypeError('Cannot change a query once a slice has been taken.')
+ raise TypeError("Cannot change a query once a slice has been taken.")
clone = self._chain()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
@@ -1245,7 +1369,7 @@
def reverse(self):
"""Reverse the ordering of the QuerySet."""
if self.query.is_sliced:
- raise TypeError('Cannot reverse a query once a slice has been taken.')
+ raise TypeError("Cannot reverse a query once a slice has been taken.")
clone = self._chain()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
@@ -1257,7 +1381,7 @@
The only exception to this is if None is passed in as the only
parameter, in which case removal all deferrals.
"""
- self._not_support_combined_queries('defer')
+ self._not_support_combined_queries("defer")
if self._fields is not None:
raise TypeError("Cannot call defer() after .values() or .values_list()")
clone = self._chain()
@@ -1273,7 +1397,7 @@
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
- self._not_support_combined_queries('only')
+ self._not_support_combined_queries("only")
if self._fields is not None:
raise TypeError("Cannot call only() after .values() or .values_list()")
if fields == (None,):
@@ -1283,7 +1407,7 @@
for field in fields:
field = field.split(LOOKUP_SEP, 1)[0]
if field in self.query._filtered_relations:
- raise ValueError('only() is not supported with FilteredRelation.')
+ raise ValueError("only() is not supported with FilteredRelation.")
clone = self._chain()
clone.query.add_immediate_loading(fields)
return clone
@@ -1309,8 +1433,9 @@
if self.query.extra_order_by or self.query.order_by:
return True
elif (
- self.query.default_ordering and
- self.query.get_meta().ordering and
+ self.query.default_ordering
+ and self.query.get_meta().ordering
+ and
# A default ordering doesn't affect GROUP BY queries.
not self.query.group_by
):
@@ -1329,7 +1454,15 @@
# PRIVATE METHODS #
###################
- def _insert(self, objs, fields, returning_fields=None, raw=False, using=None, ignore_conflicts=False):
+ def _insert(
+ self,
+ objs,
+ fields,
+ returning_fields=None,
+ raw=False,
+ using=None,
+ ignore_conflicts=False,
+ ):
"""
Insert a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
@@ -1340,6 +1473,7 @@
query = sql.InsertQuery(self.model, ignore_conflicts=ignore_conflicts)
query.insert_values(fields, objs, raw=raw)
return query.get_compiler(using=using).execute_sql(returning_fields)
+
_insert.alters_data = True
_insert.queryset_only = False
@@ -1347,22 +1481,36 @@
"""
Helper method for bulk_create() to insert objs one batch at a time.
"""
- if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
- raise NotSupportedError('This database backend does not support ignoring conflicts.')
+ if (
+ ignore_conflicts
+ and not connections[self.db].features.supports_ignore_conflicts
+ ):
+ raise NotSupportedError(
+ "This database backend does not support ignoring conflicts."
+ )
ops = connections[self.db].ops
max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
inserted_rows = []
bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
- for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
+ for item in [objs[i : i + batch_size] for i in range(0, len(objs), batch_size)]:
if bulk_return and not ignore_conflicts:
- inserted_rows.extend(self._insert(
- item, fields=fields, using=self.db,
- returning_fields=self.model._meta.db_returning_fields,
- ignore_conflicts=ignore_conflicts,
- ))
+ inserted_rows.extend(
+ self._insert(
+ item,
+ fields=fields,
+ using=self.db,
+ returning_fields=self.model._meta.db_returning_fields,
+ ignore_conflicts=ignore_conflicts,
+ )
+ )
else:
- self._insert(item, fields=fields, using=self.db, ignore_conflicts=ignore_conflicts)
+ self._insert(
+ item,
+ fields=fields,
+ using=self.db,
+ ignore_conflicts=ignore_conflicts,
+ )
return inserted_rows
def _chain(self):
@@ -1381,7 +1529,12 @@
Return a copy of the current QuerySet. A lightweight alternative
to deepcopy().
"""
- c = self.__class__(model=self.model, query=self.query.chain(), using=self._db, hints=self._hints)
+ c = self.__class__(
+ model=self.model,
+ query=self.query.chain(),
+ using=self._db,
+ hints=self._hints,
+ )
c._sticky_filter = self._sticky_filter
c._for_write = self._for_write
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
@@ -1413,9 +1566,10 @@
def _merge_sanity_check(self, other):
"""Check that two QuerySet classes may be merged."""
if self._fields is not None and (
- set(self.query.values_select) != set(other.query.values_select) or
- set(self.query.extra_select) != set(other.query.extra_select) or
- set(self.query.annotation_select) != set(other.query.annotation_select)):
+ set(self.query.values_select) != set(other.query.values_select)
+ or set(self.query.extra_select) != set(other.query.extra_select)
+ or set(self.query.annotation_select) != set(other.query.annotation_select)
+ ):
raise TypeError(
"Merging '%s' classes must involve the same values in each case."
% self.__class__.__name__
@@ -1432,10 +1586,11 @@
if self._fields and len(self._fields) > 1:
# values() queryset can only be used as nested queries
# if they are set up to select only a single field.
- raise TypeError('Cannot use multi-field values as a filter value.')
+ raise TypeError("Cannot use multi-field values as a filter value.")
query = self.query.resolve_expression(*args, **kwargs)
query._db = self._db
return query
+
resolve_expression.queryset_only = True
def _add_hints(self, **hints):
@@ -1455,19 +1610,22 @@
@staticmethod
def _validate_values_are_expressions(values, method_name):
- invalid_args = sorted(str(arg) for arg in values if not hasattr(arg, 'resolve_expression'))
+ invalid_args = sorted(
+ str(arg) for arg in values if not hasattr(arg, "resolve_expression")
+ )
if invalid_args:
raise TypeError(
- 'QuerySet.%s() received non-expression(s): %s.' % (
+ "QuerySet.%s() received non-expression(s): %s."
+ % (
method_name,
- ', '.join(invalid_args),
+ ", ".join(invalid_args),
)
)
def _not_support_combined_queries(self, operation_name):
if self.query.combinator:
raise NotSupportedError(
- 'Calling QuerySet.%s() after %s() is not supported.'
+ "Calling QuerySet.%s() after %s() is not supported."
% (operation_name, self.query.combinator)
)
@@ -1492,8 +1650,17 @@
Provide an iterator which converts the results of raw SQL queries into
annotated model instances.
"""
- def __init__(self, raw_query, model=None, query=None, params=(),
- translations=None, using=None, hints=None):
+
+ def __init__(
+ self,
+ raw_query,
+ model=None,
+ query=None,
+ params=(),
+ translations=None,
+ using=None,
+ hints=None,
+ ):
self.raw_query = raw_query
self.model = model
self._db = using
@@ -1508,10 +1675,17 @@
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
converter = connections[self.db].introspection.identifier_converter
- model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns]
- annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
- if column not in self.model_fields]
- model_init_order = [self.columns.index(converter(f.column)) for f in model_init_fields]
+ model_init_fields = [
+ f for f in self.model._meta.fields if converter(f.column) in self.columns
+ ]
+ annotation_fields = [
+ (column, pos)
+ for pos, column in enumerate(self.columns)
+ if column not in self.model_fields
+ ]
+ model_init_order = [
+ self.columns.index(converter(f.column)) for f in model_init_fields
+ ]
model_init_names = [f.attname for f in model_init_fields]
return model_init_names, model_init_order, annotation_fields
@@ -1531,8 +1705,13 @@
def _clone(self):
"""Same as QuerySet._clone()"""
c = self.__class__(
- self.raw_query, model=self.model, query=self.query, params=self.params,
- translations=self.translations, using=self._db, hints=self._hints
+ self.raw_query,
+ model=self.model,
+ query=self.query,
+ params=self.params,
+ translations=self.translations,
+ using=self._db,
+ hints=self._hints,
)
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
return c
@@ -1558,23 +1737,27 @@
def iterator(self):
# Cache some things for performance reasons outside the loop.
db = self.db
- compiler = connections[db].ops.compiler('SQLCompiler')(
+ compiler = connections[db].ops.compiler("SQLCompiler")(
self.query, connections[db], db
)
query = iter(self.query)
try:
- model_init_names, model_init_pos, annotation_fields = self.resolve_model_init_order()
+ (
+ model_init_names,
+ model_init_pos,
+ annotation_fields,
+ ) = self.resolve_model_init_order()
if self.model._meta.pk.attname not in model_init_names:
raise exceptions.FieldDoesNotExist(
- 'Raw query must include the primary key'
+ "Raw query must include the primary key"
)
model_cls = self.model
fields = [self.model_fields.get(c) for c in self.columns]
- converters = compiler.get_converters([
- f.get_col(f.model._meta.db_table) if f else None for f in fields
- ])
+ converters = compiler.get_converters(
+ [f.get_col(f.model._meta.db_table) if f else None for f in fields]
+ )
if converters:
query = compiler.apply_converters(query, converters)
for values in query:
@@ -1587,7 +1770,7 @@
yield instance
finally:
# Done iterating the Query. If it has its own cursor, close it.
- if hasattr(self.query, 'cursor') and self.query.cursor:
+ if hasattr(self.query, "cursor") and self.query.cursor:
self.query.cursor.close()
def __repr__(self):
@@ -1604,9 +1787,11 @@
def using(self, alias):
"""Select the database this RawQuerySet should execute against."""
return RawQuerySet(
- self.raw_query, model=self.model,
+ self.raw_query,
+ model=self.model,
query=self.query.chain(using=alias),
- params=self.params, translations=self.translations,
+ params=self.params,
+ translations=self.translations,
using=alias,
)
@@ -1646,17 +1831,19 @@
# `prefetch_to` is the path to the attribute that stores the result.
self.prefetch_to = lookup
if queryset is not None and (
- isinstance(queryset, RawQuerySet) or (
- hasattr(queryset, '_iterable_class') and
- not issubclass(queryset._iterable_class, ModelIterable)
+ isinstance(queryset, RawQuerySet)
+ or (
+ hasattr(queryset, "_iterable_class")
+ and not issubclass(queryset._iterable_class, ModelIterable)
)
):
raise ValueError(
- 'Prefetch querysets cannot use raw(), values(), and '
- 'values_list().'
+ "Prefetch querysets cannot use raw(), values(), and values_list()."
)
if to_attr:
- self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])
+ self.prefetch_to = LOOKUP_SEP.join(
+ lookup.split(LOOKUP_SEP)[:-1] + [to_attr]
+ )
self.queryset = queryset
self.to_attr = to_attr
@@ -1668,7 +1855,7 @@
# Prevent the QuerySet from being evaluated
queryset._result_cache = []
queryset._prefetch_done = True
- obj_dict['queryset'] = queryset
+ obj_dict["queryset"] = queryset
return obj_dict
def add_prefix(self, prefix):
@@ -1676,7 +1863,7 @@
self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to
def get_current_prefetch_to(self, level):
- return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[:level + 1])
+ return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[: level + 1])
def get_current_to_attr(self, level):
parts = self.prefetch_to.split(LOOKUP_SEP)
@@ -1721,7 +1908,7 @@
# We need to be able to dynamically add to the list of prefetch_related
# lookups that we look up (see below). So we need some book keeping to
# ensure we don't do duplicate work.
- done_queries = {} # dictionary of things like 'foo__bar': [results]
+ done_queries = {} # dictionary of things like 'foo__bar': [results]
auto_lookups = set() # we add to this as we go through.
followed_descriptors = set() # recursion protection
@@ -1731,8 +1918,11 @@
lookup = all_lookups.pop()
if lookup.prefetch_to in done_queries:
if lookup.queryset is not None:
- raise ValueError("'%s' lookup was already seen with a different queryset. "
- "You may need to adjust the ordering of your lookups." % lookup.prefetch_to)
+ raise ValueError(
+ "'%s' lookup was already seen with a different queryset. "
+ "You may need to adjust the ordering of your lookups."
+ % lookup.prefetch_to
+ )
continue
@@ -1758,7 +1948,7 @@
# Since prefetching can re-use instances, it is possible to have
# the same instance multiple times in obj_list, so obj might
# already be prepared.
- if not hasattr(obj, '_prefetched_objects_cache'):
+ if not hasattr(obj, "_prefetched_objects_cache"):
try:
obj._prefetched_objects_cache = {}
except (AttributeError, TypeError):
@@ -1778,20 +1968,30 @@
# of prefetch_related), so what applies to first object applies to all.
first_obj = obj_list[0]
to_attr = lookup.get_current_to_attr(level)[0]
- prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr, to_attr)
+ prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(
+ first_obj, through_attr, to_attr
+ )
if not attr_found:
- raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
- "parameter to prefetch_related()" %
- (through_attr, first_obj.__class__.__name__, lookup.prefetch_through))
+ raise AttributeError(
+ "Cannot find '%s' on %s object, '%s' is an invalid "
+ "parameter to prefetch_related()"
+ % (
+ through_attr,
+ first_obj.__class__.__name__,
+ lookup.prefetch_through,
+ )
+ )
if level == len(through_attrs) - 1 and prefetcher is None:
# Last one, this *must* resolve to something that supports
# prefetching, otherwise there is no point adding it and the
# developer asking for it has made a mistake.
- raise ValueError("'%s' does not resolve to an item that supports "
- "prefetching - this is an invalid parameter to "
- "prefetch_related()." % lookup.prefetch_through)
+ raise ValueError(
+ "'%s' does not resolve to an item that supports "
+ "prefetching - this is an invalid parameter to "
+ "prefetch_related()." % lookup.prefetch_through
+ )
obj_to_fetch = None
if prefetcher is not None:
@@ -1808,9 +2008,15 @@
# same relationships to stop infinite recursion. So, if we
# are already on an automatically added lookup, don't add
# the new lookups from relationships we've seen already.
- if not (prefetch_to in done_queries and lookup in auto_lookups and descriptor in followed_descriptors):
+ if not (
+ prefetch_to in done_queries
+ and lookup in auto_lookups
+ and descriptor in followed_descriptors
+ ):
done_queries[prefetch_to] = obj_list
- new_lookups = normalize_prefetch_lookups(reversed(additional_lookups), prefetch_to)
+ new_lookups = normalize_prefetch_lookups(
+ reversed(additional_lookups), prefetch_to
+ )
auto_lookups.update(new_lookups)
all_lookups.extend(new_lookups)
followed_descriptors.add(descriptor)
@@ -1824,7 +2030,7 @@
# that we can continue with nullable or reverse relations.
new_obj_list = []
for obj in obj_list:
- if through_attr in getattr(obj, '_prefetched_objects_cache', ()):
+ if through_attr in getattr(obj, "_prefetched_objects_cache", ()):
# If related objects have been prefetched, use the
# cache rather than the object's through_attr.
new_obj = list(obj._prefetched_objects_cache.get(through_attr))
@@ -1856,6 +2062,7 @@
a function that takes an instance and returns a boolean that is True if
the attribute has already been fetched for that instance)
"""
+
def has_to_attr_attribute(instance):
return hasattr(instance, to_attr)
@@ -1873,7 +2080,7 @@
if rel_obj_descriptor:
# singly related object, descriptor object has the
# get_prefetch_queryset() method.
- if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'):
+ if hasattr(rel_obj_descriptor, "get_prefetch_queryset"):
prefetcher = rel_obj_descriptor
is_fetched = rel_obj_descriptor.is_cached
else:
@@ -1881,17 +2088,21 @@
# the attribute on the instance rather than the class to
# support many related managers
rel_obj = getattr(instance, through_attr)
- if hasattr(rel_obj, 'get_prefetch_queryset'):
+ if hasattr(rel_obj, "get_prefetch_queryset"):
prefetcher = rel_obj
if through_attr != to_attr:
# Special case cached_property instances because hasattr
# triggers attribute computation and assignment.
- if isinstance(getattr(instance.__class__, to_attr, None), cached_property):
+ if isinstance(
+ getattr(instance.__class__, to_attr, None), cached_property
+ ):
+
def has_cached_property(instance):
return to_attr in instance.__dict__
is_fetched = has_cached_property
else:
+
def in_prefetched_cache(instance):
return through_attr in instance._prefetched_objects_cache
@@ -1922,8 +2133,14 @@
# The 'values to be matched' must be hashable as they will be used
# in a dictionary.
- rel_qs, rel_obj_attr, instance_attr, single, cache_name, is_descriptor = (
- prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
+ (
+ rel_qs,
+ rel_obj_attr,
+ instance_attr,
+ single,
+ cache_name,
+ is_descriptor,
+ ) = prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level))
# We have to handle the possibility that the QuerySet we just got back
# contains some prefetch_related lookups. We don't want to trigger the
# prefetch_related functionality by evaluating the query. Rather, we need
@@ -1931,8 +2148,8 @@
# Copy the lookups in case it is a Prefetch object which could be reused
# later (happens in nested prefetch_related).
additional_lookups = [
- copy.copy(additional_lookup) for additional_lookup
- in getattr(rel_qs, '_prefetch_related_lookups', ())
+ copy.copy(additional_lookup)
+ for additional_lookup in getattr(rel_qs, "_prefetch_related_lookups", ())
]
if additional_lookups:
# Don't need to clone because the manager should have given us a fresh
@@ -1958,7 +2175,7 @@
except exceptions.FieldDoesNotExist:
pass
else:
- msg = 'to_attr={} conflicts with a field on the {} model.'
+ msg = "to_attr={} conflicts with a field on the {} model."
raise ValueError(msg.format(to_attr, model.__name__))
# Whether or not we're prefetching the last part of the lookup.
@@ -2014,6 +2231,7 @@
method gets row and from_obj as input and populates the select_related()
model instance.
"""
+
def __init__(self, klass_info, select, db):
self.db = db
# Pre-compute needed attributes. The attributes are:
@@ -2039,32 +2257,40 @@
# - local_setter, remote_setter: Methods to set cached values on
# the object being populated and on the remote object. Usually
# these are Field.set_cached_value() methods.
- select_fields = klass_info['select_fields']
- from_parent = klass_info['from_parent']
+ select_fields = klass_info["select_fields"]
+ from_parent = klass_info["from_parent"]
if not from_parent:
self.cols_start = select_fields[0]
self.cols_end = select_fields[-1] + 1
self.init_list = [
- f[0].target.attname for f in select[self.cols_start:self.cols_end]
+ f[0].target.attname for f in select[self.cols_start : self.cols_end]
]
self.reorder_for_init = None
else:
- attname_indexes = {select[idx][0].target.attname: idx for idx in select_fields}
- model_init_attnames = (f.attname for f in klass_info['model']._meta.concrete_fields)
- self.init_list = [attname for attname in model_init_attnames if attname in attname_indexes]
- self.reorder_for_init = operator.itemgetter(*[attname_indexes[attname] for attname in self.init_list])
+ attname_indexes = {
+ select[idx][0].target.attname: idx for idx in select_fields
+ }
+ model_init_attnames = (
+ f.attname for f in klass_info["model"]._meta.concrete_fields
+ )
+ self.init_list = [
+ attname for attname in model_init_attnames if attname in attname_indexes
+ ]
+ self.reorder_for_init = operator.itemgetter(
+ *[attname_indexes[attname] for attname in self.init_list]
+ )
- self.model_cls = klass_info['model']
+ self.model_cls = klass_info["model"]
self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
self.related_populators = get_related_populators(klass_info, select, self.db)
- self.local_setter = klass_info['local_setter']
- self.remote_setter = klass_info['remote_setter']
+ self.local_setter = klass_info["local_setter"]
+ self.remote_setter = klass_info["remote_setter"]
def populate(self, row, from_obj):
if self.reorder_for_init:
obj_data = self.reorder_for_init(row)
else:
- obj_data = row[self.cols_start:self.cols_end]
+ obj_data = row[self.cols_start : self.cols_end]
if obj_data[self.pk_idx] is None:
obj = None
else:
@@ -2078,7 +2304,7 @@
def get_related_populators(klass_info, select, db):
iterators = []
- related_klass_infos = klass_info.get('related_klass_infos', [])
+ related_klass_infos = klass_info.get("related_klass_infos", [])
for rel_klass_info in related_klass_infos:
rel_cls = RelatedPopulator(rel_klass_info, select, db)
iterators.append(rel_cls)
diff --git a/docs/1.0-dev/_modules/django/db/models/query_utils.html b/docs/1.0-dev/_modules/django/db/models/query_utils.html
index 8bf852d1f5..16997a4fc3 100644
--- a/docs/1.0-dev/_modules/django/db/models/query_utils.html
+++ b/docs/1.0-dev/_modules/django/db/models/query_utils.html
@@ -58,7 +58,10 @@
# PathInfo is used when converting lookups (fk__somecol). The contents
# describe the relation in Model terms (model Options and Fields for both
# sides of the relation. The join_field is the field backing the relation.
-PathInfo = namedtuple('PathInfo', 'from_opts to_opts target_fields join_field m2m direct filtered_relation')
+PathInfo = namedtuple(
+ "PathInfo",
+ "from_opts to_opts target_fields join_field m2m direct filtered_relation",
+)
def subclasses(cls):
@@ -72,21 +75,26 @@
Encapsulate filters as objects that can then be combined logically (using
`&` and `|`).
"""
+
# Connection types
- AND = 'AND'
- OR = 'OR'
+ AND = "AND"
+ OR = "OR"
default = AND
conditional = True
def __init__(self, *args, _connector=None, _negated=False, **kwargs):
- super().__init__(children=[*args, *sorted(kwargs.items())], connector=_connector, negated=_negated)
+ super().__init__(
+ children=[*args, *sorted(kwargs.items())],
+ connector=_connector,
+ negated=_negated,
+ )
def _combine(self, other, conn):
- if not(isinstance(other, Q) or getattr(other, 'conditional', False) is True):
+ if not (isinstance(other, Q) or getattr(other, "conditional", False) is True):
raise TypeError(other)
if not self:
- return other.copy() if hasattr(other, 'copy') else copy.copy(other)
+ return other.copy() if hasattr(other, "copy") else copy.copy(other)
elif isinstance(other, Q) and not other:
_, args, kwargs = self.deconstruct()
return type(self)(*args, **kwargs)
@@ -109,26 +117,31 @@
obj.negate()
return obj
- def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
+ def resolve_expression(
+ self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
+ ):
# We must promote any new joins to left outer joins so that when Q is
# used as an expression, rows aren't filtered due to joins.
clause, joins = query._add_q(
- self, reuse, allow_joins=allow_joins, split_subq=False,
+ self,
+ reuse,
+ allow_joins=allow_joins,
+ split_subq=False,
check_filterable=False,
)
query.promote_joins(joins)
return clause
def deconstruct(self):
- path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
- if path.startswith('django.db.models.query_utils'):
- path = path.replace('django.db.models.query_utils', 'django.db.models')
+ path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
+ if path.startswith("django.db.models.query_utils"):
+ path = path.replace("django.db.models.query_utils", "django.db.models")
args = tuple(self.children)
kwargs = {}
if self.connector != self.default:
- kwargs['_connector'] = self.connector
+ kwargs["_connector"] = self.connector
if self.negated:
- kwargs['_negated'] = True
+ kwargs["_negated"] = True
return path, args, kwargs
@@ -137,6 +150,7 @@
A wrapper for a deferred-loading field. When the value is read from this
object the first time, the query is executed.
"""
+
def __init__(self, field):
self.field = field
@@ -173,7 +187,6 @@
class RegisterLookupMixin:
-
@classmethod
def _get_lookup(cls, lookup_name):
return cls.get_lookups().get(lookup_name, None)
@@ -181,13 +194,16 @@
@classmethod
@functools.lru_cache(maxsize=None)
def get_lookups(cls):
- class_lookups = [parent.__dict__.get('class_lookups', {}) for parent in inspect.getmro(cls)]
+ class_lookups = [
+ parent.__dict__.get("class_lookups", {}) for parent in inspect.getmro(cls)
+ ]
return cls.merge_dicts(class_lookups)
def get_lookup(self, lookup_name):
from django.db.models.lookups import Lookup
+
found = self._get_lookup(lookup_name)
- if found is None and hasattr(self, 'output_field'):
+ if found is None and hasattr(self, "output_field"):
return self.output_field.get_lookup(lookup_name)
if found is not None and not issubclass(found, Lookup):
return None
@@ -195,8 +211,9 @@
def get_transform(self, lookup_name):
from django.db.models.lookups import Transform
+
found = self._get_lookup(lookup_name)
- if found is None and hasattr(self, 'output_field'):
+ if found is None and hasattr(self, "output_field"):
return self.output_field.get_transform(lookup_name)
if found is not None and not issubclass(found, Transform):
return None
@@ -222,7 +239,7 @@
def register_lookup(cls, lookup, lookup_name=None):
if lookup_name is None:
lookup_name = lookup.lookup_name
- if 'class_lookups' not in cls.__dict__:
+ if "class_lookups" not in cls.__dict__:
cls.class_lookups = {}
cls.class_lookups[lookup_name] = lookup
cls._clear_cached_lookups()
@@ -269,8 +286,8 @@
if field.attname not in load_fields:
if restricted and field.name in requested:
msg = (
- 'Field %s.%s cannot be both deferred and traversed using '
- 'select_related at the same time.'
+ "Field %s.%s cannot be both deferred and traversed using "
+ "select_related at the same time."
) % (field.model._meta.object_name, field.name)
raise FieldError(msg)
return True
@@ -296,12 +313,14 @@
1) model and opts match (where proxy inheritance is removed)
2) model is parent of opts' model or the other way around
"""
+
def check(opts):
return (
- model._meta.concrete_model == opts.concrete_model or
- opts.concrete_model in model._meta.get_parent_list() or
- model in opts.get_parent_list()
+ model._meta.concrete_model == opts.concrete_model
+ or opts.concrete_model in model._meta.get_parent_list()
+ or model in opts.get_parent_list()
)
+
# If the field is a primary key, then doing a query against the field's
# model is ok, too. Consider the case:
# class Restaurant(models.Model):
@@ -311,9 +330,8 @@
# give Place's opts as the target opts, but Restaurant isn't compatible
# with that. This logic applies only to primary keys, as when doing __in=qs,
# we are going to turn this into __in=qs.values('pk') later on.
- return (
- check(target_opts) or
- (getattr(field, 'primary_key', False) and check(field.model._meta))
+ return check(target_opts) or (
+ getattr(field, "primary_key", False) and check(field.model._meta)
)
@@ -322,11 +340,11 @@
def __init__(self, relation_name, *, condition=Q()):
if not relation_name:
- raise ValueError('relation_name cannot be empty.')
+ raise ValueError("relation_name cannot be empty.")
self.relation_name = relation_name
self.alias = None
if not isinstance(condition, Q):
- raise ValueError('condition argument must be a Q() instance.')
+ raise ValueError("condition argument must be a Q() instance.")
self.condition = condition
self.path = []
@@ -334,9 +352,9 @@
if not isinstance(other, self.__class__):
return NotImplemented
return (
- self.relation_name == other.relation_name and
- self.alias == other.alias and
- self.condition == other.condition
+ self.relation_name == other.relation_name
+ and self.alias == other.alias
+ and self.condition == other.condition
)
def clone(self):
@@ -350,7 +368,7 @@
QuerySet.annotate() only accepts expression-like arguments
(with a resolve_expression() method).
"""
- raise NotImplementedError('FilteredRelation.resolve_expression() is unused.')
+ raise NotImplementedError("FilteredRelation.resolve_expression() is unused.")
def as_sql(self, compiler, connection):
# Resolve the condition in Join.filtered_relation.
diff --git a/docs/1.0-dev/_modules/django/utils/functional.html b/docs/1.0-dev/_modules/django/utils/functional.html
index 5cef171206..f4e3ea2eaa 100644
--- a/docs/1.0-dev/_modules/django/utils/functional.html
+++ b/docs/1.0-dev/_modules/django/utils/functional.html
@@ -55,18 +55,19 @@
The optional ``name`` argument is obsolete as of Python 3.6 and will be
deprecated in Django 4.0 (#30127).
"""
+
name = None
@staticmethod
def func(instance):
raise TypeError(
- 'Cannot use cached_property instance without calling '
- '__set_name__() on it.'
+ "Cannot use cached_property instance without calling "
+ "__set_name__() on it."
)
def __init__(self, func, name=None):
self.real_func = func
- self.__doc__ = getattr(func, '__doc__')
+ self.__doc__ = getattr(func, "__doc__")
def __set_name__(self, owner, name):
if self.name is None:
@@ -95,6 +96,7 @@
Decorator that converts a method with a single cls argument into a property
that can be accessed directly from the class.
"""
+
def __init__(self, method=None):
self.fget = method
@@ -111,6 +113,7 @@
Base class for the proxy class created in the closure of the lazy function.
It's used to recognize promises in code.
"""
+
pass
@@ -129,6 +132,7 @@
called on the result of that function. The function is not evaluated
until one of the methods on the result is called.
"""
+
__prepared = False
def __init__(self, args, kw):
@@ -141,7 +145,7 @@
def __reduce__(self):
return (
_lazy_proxy_unpickle,
- (func, self.__args, self.__kw) + resultclasses
+ (func, self.__args, self.__kw) + resultclasses,
)
def __repr__(self):
@@ -162,7 +166,7 @@
cls._delegate_text = str in resultclasses
if cls._delegate_bytes and cls._delegate_text:
raise ValueError(
- 'Cannot call lazy() with both bytes and text return types.'
+ "Cannot call lazy() with both bytes and text return types."
)
if cls._delegate_text:
cls.__str__ = cls.__text_cast
@@ -177,6 +181,7 @@
# applies the given magic method of the result type.
res = func(*self.__args, **self.__kw)
return getattr(res, method_name)(*args, **kw)
+
return __wrapper__
def __text_cast(self):
@@ -266,10 +271,15 @@
@wraps(func)
def wrapper(*args, **kwargs):
- if any(isinstance(arg, Promise) for arg in itertools.chain(args, kwargs.values())):
+ if any(
+ isinstance(arg, Promise)
+ for arg in itertools.chain(args, kwargs.values())
+ ):
return lazy_func(*args, **kwargs)
return func(*args, **kwargs)
+
return wrapper
+
return decorator
@@ -288,6 +298,7 @@
if self._wrapped is empty:
self._setup()
return func(self._wrapped, *args)
+
return inner
@@ -330,7 +341,9 @@
"""
Must be implemented by subclasses to initialize the wrapped object.
"""
- raise NotImplementedError('subclasses of LazyObject must provide a _setup() method')
+ raise NotImplementedError(
+ "subclasses of LazyObject must provide a _setup() method"
+ )
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. We're going to have to initialize the wrapped
@@ -409,6 +422,7 @@
Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy.
"""
+
def __init__(self, func):
"""
Pass in a callable that returns the object to be wrapped.
@@ -418,7 +432,7 @@
callable can be safely run more than once and will return the same
value.
"""
- self.__dict__['_setupfunc'] = func
+ self.__dict__["_setupfunc"] = func
super().__init__()
def _setup(self):
@@ -431,7 +445,7 @@
repr_attr = self._setupfunc
else:
repr_attr = self._wrapped
- return '<%s: %r>' % (type(self).__name__, repr_attr)
+ return "<%s: %r>" % (type(self).__name__, repr_attr)
def __copy__(self):
if self._wrapped is empty:
diff --git a/docs/1.0-dev/_modules/evennia/commands/default/building.html b/docs/1.0-dev/_modules/evennia/commands/default/building.html
index 5ec06cf046..9f7b1df954 100644
--- a/docs/1.0-dev/_modules/evennia/commands/default/building.html
+++ b/docs/1.0-dev/_modules/evennia/commands/default/building.html
@@ -3011,7 +3011,7 @@
# we are only interested in specific attributes
attrs = [attr for attr in obj.db_attributes.all() if attr.db_key in obj_attrs]
if not attrs:
- self.msg("No attributes found on {obj.name}.")
+ self.msg(f"No attributes found on {obj.name}.")
else:
out_strings = []
for attr in attrs:
diff --git a/docs/1.0-dev/_modules/evennia/contrib/game_systems/puzzles/tests.html b/docs/1.0-dev/_modules/evennia/contrib/game_systems/puzzles/tests.html
index 6ed20fbd27..c2a65c66b2 100644
--- a/docs/1.0-dev/_modules/evennia/contrib/game_systems/puzzles/tests.html
+++ b/docs/1.0-dev/_modules/evennia/contrib/game_systems/puzzles/tests.html
@@ -425,7 +425,7 @@
self._use(
"steel-1, flint", "You try to utilize these but nothing happens ... something amiss?"
)
- self._use("steel-1, flint, red steel, steel-3", "You are a Genius")
+ self._use("steel-1, flint, red steel, steel-2", "You are a Genius")
self._check_room_contents({"smoke": 1, "fire": 1})
_box_all()
diff --git a/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/tests.html b/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/tests.html
index 94a8df5ae1..3c51abf06d 100644
--- a/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/tests.html
+++ b/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/tests.html
@@ -328,13 +328,14 @@
trait_type="static",
base=1,
mod=2,
+ mult=1.0,
extra_val1="xvalue1",
extra_val2="xvalue2",
)
self.trait = self.traithandler.get("test1")
def _get_values(self):
- return self.trait.base, self.trait.mod, self.trait.value
+ return self.trait.base, self.trait.mod, self.trait.mult, self.trait.value
[docs] def test_init(self):
self.assertEqual(
@@ -344,25 +345,34 @@
"trait_type": "static",
"base": 1,
"mod": 2,
+ "mult": 1.0,
"extra_val1": "xvalue1",
"extra_val2": "xvalue2",
},
)
[docs] def test_value(self):
- """value is base + mod"""
- self.assertEqual(self._get_values(), (1, 2, 3))
+ """value is (base + mod) * mult"""
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 3))
self.trait.base += 4
- self.assertEqual(self._get_values(), (5, 2, 7))
+ self.assertEqual(self._get_values(), (5, 2, 1.0, 7))
self.trait.mod -= 1
- self.assertEqual(self._get_values(), (5, 1, 6))
+ self.assertEqual(self._get_values(), (5, 1, 1.0, 6))
+ self.trait.mult += 1.0
+ self.assertEqual(self._get_values(), (5, 1, 2.0, 12))
+ self.trait.mult = 0.75
+ self.assertEqual(self._get_values(), (5, 1, 0.75, 4.5))
+
[docs] def test_delete(self):
"""Deleting resets to default."""
+ self.trait.mult = 2.0
del self.trait.base
- self.assertEqual(self._get_values(), (0, 2, 2))
+ self.assertEqual(self._get_values(), (0, 2, 2.0, 4))
+ del self.trait.mult
+ self.assertEqual(self._get_values(), (0, 2, 1.0, 2))
del self.trait.mod
- self.assertEqual(self._get_values(), (0, 0, 0))
+ self.assertEqual(self._get_values(), (0, 0, 1.0, 0))
[docs]class TestTraitCounter(_TraitHandlerBase):
@@ -378,6 +388,7 @@
trait_type="counter",
base=1,
mod=2,
+ mult=1.0,
min=0,
max=10,
extra_val1="xvalue1",
@@ -392,8 +403,8 @@
self.trait = self.traithandler.get("test1")
def _get_values(self):
- """Get (base, mod, value, min, max)."""
- return (self.trait.base, self.trait.mod, self.trait.value, self.trait.min, self.trait.max)
+ """Get (base, mod, mult, value, min, max)."""
+ return (self.trait.base, self.trait.mod, self.trait.mult, self.trait.value, self.trait.min, self.trait.max)
[docs] def test_init(self):
self.assertEqual(
@@ -403,6 +414,7 @@
"trait_type": "counter",
"base": 1,
"mod": 2,
+ "mult": 1.0,
"min": 0,
"max": 10,
"extra_val1": "xvalue1",
@@ -420,102 +432,105 @@
)
[docs] def test_value(self):
- """value is current + mod, where current defaults to base"""
- self.assertEqual(self._get_values(), (1, 2, 3, 0, 10))
+ """value is (current + mod) * mult, where current defaults to base"""
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 3, 0, 10))
self.trait.base += 4
- self.assertEqual(self._get_values(), (5, 2, 7, 0, 10))
+ self.assertEqual(self._get_values(), (5, 2, 1.0, 7, 0, 10))
self.trait.mod -= 1
- self.assertEqual(self._get_values(), (5, 1, 6, 0, 10))
+ self.assertEqual(self._get_values(), (5, 1, 1.0, 6, 0, 10))
+ self.trait.mult += 1.0
+ self.assertEqual(self._get_values(), (5, 1, 2.0, 10, 0, 10))
[docs] def test_boundaries__minmax(self):
"""Test range"""
# should not exceed min/max values
self.trait.base += 20
- self.assertEqual(self._get_values(), (8, 2, 10, 0, 10))
+ self.assertEqual(self._get_values(), (8, 2, 1.0, 10, 0, 10))
self.trait.base = 100
- self.assertEqual(self._get_values(), (8, 2, 10, 0, 10))
+ self.assertEqual(self._get_values(), (8, 2, 1.0, 10, 0, 10))
self.trait.base -= 40
- self.assertEqual(self._get_values(), (-2, 2, 0, 0, 10))
+ self.assertEqual(self._get_values(), (-2, 2, 1.0, 0, 0, 10))
self.trait.base = -100
- self.assertEqual(self._get_values(), (-2, 2, 0, 0, 10))
+ self.assertEqual(self._get_values(), (-2, 2, 1.0, 0, 0, 10))
[docs] def test_boundaries__bigmod(self):
"""add a big mod"""
self.trait.base = 5
self.trait.mod = 100
- self.assertEqual(self._get_values(), (5, 5, 10, 0, 10))
+ self.assertEqual(self._get_values(), (5, 5, 1.0, 10, 0, 10))
self.trait.mod = -100
- self.assertEqual(self._get_values(), (5, -5, 0, 0, 10))
+ self.assertEqual(self._get_values(), (5, -5, 1.0, 0, 0, 10))
[docs] def test_boundaries__change_boundaries(self):
"""Change boundaries after base/mod change"""
self.trait.base = 5
self.trait.mod = -100
self.trait.min = -20
- self.assertEqual(self._get_values(), (5, -5, 0, -20, 10))
+ self.assertEqual(self._get_values(), (5, -5, 1.0, 0, -20, 10))
self.trait.mod -= 100
- self.assertEqual(self._get_values(), (5, -25, -20, -20, 10))
+ self.assertEqual(self._get_values(), (5, -25, 1.0, -20, -20, 10))
self.trait.mod = 100
self.trait.max = 20
- self.assertEqual(self._get_values(), (5, 5, 10, -20, 20))
+ self.assertEqual(self._get_values(), (5, 5, 1.0, 10, -20, 20))
self.trait.mod = 100
- self.assertEqual(self._get_values(), (5, 15, 20, -20, 20))
+ self.assertEqual(self._get_values(), (5, 15, 1.0, 20, -20, 20))
[docs] def test_boundaries__disable(self):
"""Disable and re-enable boundaries"""
self.trait.base = 5
self.trait.mod = 100
- self.assertEqual(self._get_values(), (5, 5, 10, 0, 10))
+ self.assertEqual(self._get_values(), (5, 5, 1.0, 10, 0, 10))
del self.trait.max
self.assertEqual(self.trait.max, None)
del self.trait.min
self.assertEqual(self.trait.min, None)
self.trait.base = 100
- self.assertEqual(self._get_values(), (100, 5, 105, None, None))
+ self.assertEqual(self._get_values(), (100, 5, 1.0, 105, None, None))
self.trait.base = -200
- self.assertEqual(self._get_values(), (-200, 5, -195, None, None))
+ self.assertEqual(self._get_values(), (-200, 5, 1.0, -195, None, None))
# re-activate boundaries
self.trait.max = 15
self.trait.min = 10 # his is blocked since base+mod is lower
- self.assertEqual(self._get_values(), (-200, 5, -195, -195, 15))
+ self.assertEqual(self._get_values(), (-200, 5, 1.0, -195, -195, 15))
[docs] def test_boundaries__inverse(self):
"""Set inverse boundaries - limited by base"""
self.trait.mod = 0
- self.assertEqual(self._get_values(), (1, 0, 1, 0, 10))
+ self.assertEqual(self._get_values(), (1, 0, 1.0, 1, 0, 10))
self.trait.min = 20 # will be set to base
- self.assertEqual(self._get_values(), (1, 0, 1, 1, 10))
+ self.assertEqual(self._get_values(), (1, 0, 1.0, 1, 1, 10))
self.trait.max = -20
- self.assertEqual(self._get_values(), (1, 0, 1, 1, 1))
+ self.assertEqual(self._get_values(), (1, 0, 1.0, 1, 1, 1))
[docs] def test_current(self):
"""Modifying current value"""
self.trait.current = 5
- self.assertEqual(self._get_values(), (1, 2, 7, 0, 10))
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 7, 0, 10))
self.trait.current = 10
- self.assertEqual(self._get_values(), (1, 2, 10, 0, 10))
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 10, 0, 10))
self.trait.current = 12
- self.assertEqual(self._get_values(), (1, 2, 10, 0, 10))
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 10, 0, 10))
self.trait.current = -1
- self.assertEqual(self._get_values(), (1, 2, 2, 0, 10))
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 2, 0, 10))
self.trait.current -= 10
- self.assertEqual(self._get_values(), (1, 2, 2, 0, 10))
+ self.assertEqual(self._get_values(), (1, 2, 1.0, 2, 0, 10))
[docs] def test_delete(self):
"""Deleting resets to default."""
del self.trait.base
- self.assertEqual(self._get_values(), (0, 2, 2, 0, 10))
+ self.assertEqual(self._get_values(), (0, 2, 1.0, 2, 0, 10))
del self.trait.mod
- self.assertEqual(self._get_values(), (0, 0, 0, 0, 10))
+ self.assertEqual(self._get_values(), (0, 0, 1.0, 0, 0, 10))
del self.trait.min
del self.trait.max
- self.assertEqual(self._get_values(), (0, 0, 0, None, None))
+ self.assertEqual(self._get_values(), (0, 0, 1.0, 0, None, None))
[docs] def test_percentage(self):
"""Test percentage calculation"""
self.trait.base = 8
self.trait.mod = 2
+ self.trait.mult = 1.0
self.trait.min = 0
self.trait.max = 10
self.assertEqual(self.trait.percent(), "100.0%")
@@ -536,7 +551,7 @@
"""Test descriptions"""
self.trait.min = -5
self.trait.mod = 0
- self.assertEqual(self._get_values(), (1, 0, 1, -5, 10))
+ self.assertEqual(self._get_values(), (1, 0, 1.0, 1, -5, 10))
self.trait.current = -2
self.assertEqual(self.trait.desc(), "range0")
self.trait.current = 0
@@ -567,6 +582,7 @@
trait_type="counter",
base=1,
mod=2,
+ mult=1.0,
min=0,
max=100,
extra_val1="xvalue1",
@@ -644,8 +660,9 @@
"test1",
name="Test1",
trait_type="gauge",
- base=8, # max = base + mod
+ base=8, # max = (base + mod) * mult
mod=2,
+ mult=1.0,
extra_val1="xvalue1",
extra_val2="xvalue2",
descs={
@@ -658,8 +675,8 @@
self.trait = self.traithandler.get("test1")
def _get_values(self):
- """Get (base, mod, value, min, max)."""
- return (self.trait.base, self.trait.mod, self.trait.value, self.trait.min, self.trait.max)
+ """Get (base, mod, mult, value, min, max)."""
+ return (self.trait.base, self.trait.mod, self.trait.mult, self.trait.value, self.trait.min, self.trait.max)
[docs] def test_init(self):
self.assertEqual(
@@ -669,6 +686,7 @@
"trait_type": "gauge",
"base": 8,
"mod": 2,
+ "mult": 1.0,
"min": 0,
"extra_val1": "xvalue1",
"extra_val2": "xvalue2",
@@ -687,71 +705,80 @@
[docs] def test_value(self):
"""value is current, where current defaults to base + mod"""
# current unset - follows base + mod
- self.assertEqual(self._get_values(), (8, 2, 10, 0, 10))
+ self.assertEqual(self._get_values(), (8, 2, 1.0, 10, 0, 10))
self.trait.base += 4
- self.assertEqual(self._get_values(), (12, 2, 14, 0, 14))
+ self.assertEqual(self._get_values(), (12, 2, 1.0, 14, 0, 14))
self.trait.mod -= 1
- self.assertEqual(self._get_values(), (12, 1, 13, 0, 13))
+ self.assertEqual(self._get_values(), (12, 1, 1.0, 13, 0, 13))
+ self.trait.mult += 1.0
+ self.assertEqual(self._get_values(), (12, 1, 2.0, 26, 0, 26))
# set current, decouple from base + mod
self.trait.current = 5
- self.assertEqual(self._get_values(), (12, 1, 5, 0, 13))
+ self.assertEqual(self._get_values(), (12, 1, 2.0, 5, 0, 26))
self.trait.mod += 1
self.trait.base -= 4
- self.assertEqual(self._get_values(), (8, 2, 5, 0, 10))
+ self.trait.mult -= 1.0
+ self.assertEqual(self._get_values(), (8, 2, 1.0, 5, 0, 10))
self.trait.min = -100
self.trait.base = -20
- self.assertEqual(self._get_values(), (-20, 2, -18, -100, -18))
+ self.assertEqual(self._get_values(), (-20, 2, 1.0, -18, -100, -18))
[docs] def test_boundaries__minmax(self):
"""Test range"""
# current unset - tied to base + mod
self.trait.base += 20
- self.assertEqual(self._get_values(), (28, 2, 30, 0, 30))
+ self.assertEqual(self._get_values(), (28, 2, 1.0, 30, 0, 30))
# set current - decouple from base + mod
self.trait.current = 19
- self.assertEqual(self._get_values(), (28, 2, 19, 0, 30))
+ self.assertEqual(self._get_values(), (28, 2, 1.0, 19, 0, 30))
# test upper bound
self.trait.current = 100
- self.assertEqual(self._get_values(), (28, 2, 30, 0, 30))
+ self.assertEqual(self._get_values(), (28, 2, 1.0, 30, 0, 30))
+ # with multiplier
+ self.trait.mult = 2.0
+ self.assertEqual(self._get_values(), (28, 2, 2.0, 30, 0, 60))
+ self.trait.current = 100
+ self.assertEqual(self._get_values(), (28, 2, 2.0, 60, 0, 60))
# min defaults to 0
+ self.trait.mult = 1.0
self.trait.current = -10
- self.assertEqual(self._get_values(), (28, 2, 0, 0, 30))
+ self.assertEqual(self._get_values(), (28, 2, 1.0, 0, 0, 30))
self.trait.min = -20
- self.assertEqual(self._get_values(), (28, 2, 0, -20, 30))
+ self.assertEqual(self._get_values(), (28, 2, 1.0, 0, -20, 30))
self.trait.current = -10
- self.assertEqual(self._get_values(), (28, 2, -10, -20, 30))
+ self.assertEqual(self._get_values(), (28, 2, 1.0, -10, -20, 30))
[docs] def test_boundaries__bigmod(self):
"""add a big mod"""
self.trait.base = 5
self.trait.mod = 100
- self.assertEqual(self._get_values(), (5, 100, 105, 0, 105))
+ self.assertEqual(self._get_values(), (5, 100, 1.0, 105, 0, 105))
# restricted by min
self.trait.mod = -100
- self.assertEqual(self._get_values(), (5, -5, 0, 0, 0))
+ self.assertEqual(self._get_values(), (5, -5, 1.0, 0, 0, 0))
self.trait.min = -200
- self.assertEqual(self._get_values(), (5, -5, 0, -200, 0))
+ self.assertEqual(self._get_values(), (5, -5, 1.0, 0, -200, 0))
[docs] def test_boundaries__change_boundaries(self):
"""Change boundaries after current change"""
self.trait.current = 20
- self.assertEqual(self._get_values(), (8, 2, 10, 0, 10))
+ self.assertEqual(self._get_values(), (8, 2, 1.0, 10, 0, 10))
self.trait.mod = 102
- self.assertEqual(self._get_values(), (8, 102, 10, 0, 110))
+ self.assertEqual(self._get_values(), (8, 102, 1.0, 10, 0, 110))
# raising min past current value will force it upwards
self.trait.min = 20
- self.assertEqual(self._get_values(), (8, 102, 20, 20, 110))
+ self.assertEqual(self._get_values(), (8, 102, 1.0, 20, 20, 110))
[docs] def test_boundaries__disable(self):
"""Disable and re-enable boundary"""
self.trait.base = 5
self.trait.min = 1
- self.assertEqual(self._get_values(), (5, 2, 7, 1, 7))
+ self.assertEqual(self._get_values(), (5, 2, 1.0, 7, 1, 7))
del self.trait.min
- self.assertEqual(self._get_values(), (5, 2, 7, 0, 7))
+ self.assertEqual(self._get_values(), (5, 2, 1.0, 7, 0, 7))
del self.trait.base
del self.trait.mod
- self.assertEqual(self._get_values(), (0, 0, 0, 0, 0))
+ self.assertEqual(self._get_values(), (0, 0, 1.0, 0, 0, 0))
with self.assertRaises(traits.TraitException):
del self.trait.max
@@ -759,41 +786,41 @@
"""Try to set reversed boundaries"""
self.trait.mod = 0
self.trait.base = -10 # limited by min
- self.assertEqual(self._get_values(), (0, 0, 0, 0, 0))
+ self.assertEqual(self._get_values(), (0, 0, 1.0, 0, 0, 0))
self.trait.min = -10
- self.assertEqual(self._get_values(), (0, 0, 0, -10, 0))
+ self.assertEqual(self._get_values(), (0, 0, 1.0, 0, -10, 0))
self.trait.base = -10
- self.assertEqual(self._get_values(), (-10, 0, -10, -10, -10))
+ self.assertEqual(self._get_values(), (-10, 0, 1.0, -10, -10, -10))
self.min = 0 # limited by base + mod
- self.assertEqual(self._get_values(), (-10, 0, -10, -10, -10))
+ self.assertEqual(self._get_values(), (-10, 0, 1.0, -10, -10, -10))
[docs] def test_current(self):
"""Modifying current value"""
self.trait.base = 10
self.trait.current = 5
- self.assertEqual(self._get_values(), (10, 2, 5, 0, 12))
+ self.assertEqual(self._get_values(), (10, 2, 1.0, 5, 0, 12))
self.trait.current = 10
- self.assertEqual(self._get_values(), (10, 2, 10, 0, 12))
+ self.assertEqual(self._get_values(), (10, 2, 1.0, 10, 0, 12))
self.trait.current = 12
- self.assertEqual(self._get_values(), (10, 2, 12, 0, 12))
+ self.assertEqual(self._get_values(), (10, 2, 1.0, 12, 0, 12))
self.trait.current = 0
- self.assertEqual(self._get_values(), (10, 2, 0, 0, 12))
+ self.assertEqual(self._get_values(), (10, 2, 1.0, 0, 0, 12))
self.trait.current = -1
- self.assertEqual(self._get_values(), (10, 2, 0, 0, 12))
+ self.assertEqual(self._get_values(), (10, 2, 1.0, 0, 0, 12))
[docs] def test_delete(self):
"""Deleting resets to default."""
del self.trait.mod
- self.assertEqual(self._get_values(), (8, 0, 8, 0, 8))
+ self.assertEqual(self._get_values(), (8, 0, 1.0, 8, 0, 8))
self.trait.mod = 2
del self.trait.base
- self.assertEqual(self._get_values(), (0, 2, 2, 0, 2))
+ self.assertEqual(self._get_values(), (0, 2, 1.0, 2, 0, 2))
del self.trait.min
- self.assertEqual(self._get_values(), (0, 2, 2, 0, 2))
+ self.assertEqual(self._get_values(), (0, 2, 1.0, 2, 0, 2))
self.trait.min = -10
- self.assertEqual(self._get_values(), (0, 2, 2, -10, 2))
+ self.assertEqual(self._get_values(), (0, 2, 1.0, 2, -10, 2))
del self.trait.min
- self.assertEqual(self._get_values(), (0, 2, 2, 0, 2))
+ self.assertEqual(self._get_values(), (0, 2, 1.0, 2, 0, 2))
[docs] def test_percentage(self):
"""Test percentage calculation"""
@@ -808,7 +835,7 @@
[docs] def test_descs(self):
"""Test descriptions"""
self.trait.min = -5
- self.assertEqual(self._get_values(), (8, 2, 10, -5, 10))
+ self.assertEqual(self._get_values(), (8, 2, 1.0, 10, -5, 10))
self.trait.current = -2
self.assertEqual(self.trait.desc(), "range0")
self.trait.current = 0
diff --git a/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/traits.html b/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/traits.html
index 6ed43ffc91..8b86675247 100644
--- a/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/traits.html
+++ b/docs/1.0-dev/_modules/evennia/contrib/rpg/traits/traits.html
@@ -87,7 +87,7 @@
def at_object_creation(self):
# (or wherever you want)
- self.traits.add("str", "Strength", trait_type="static", base=10, mod=2)
+ self.traits.add("str", "Strength", trait_type="static", base=10, mod=2, mult=2.0)
self.traits.add("hp", "Health", trait_type="gauge", min=0, max=100)
self.traits.add("hunting", "Hunting Skill", trait_type="counter",
base=10, mod=1, min=0, max=100)
@@ -116,9 +116,9 @@
class Object(DefaultObject):
...
- strength = TraitProperty("Strength", trait_type="static", base=10, mod=2)
+ strength = TraitProperty("Strength", trait_type="static", base=10, mod=2, mult=1.5)
health = TraitProperty("Health", trait_type="gauge", min=0, base=100, mod=2)
- hunting = TraitProperty("Hunting Skill", trait_type="counter", base=10, mod=1, min=0, max=100)
+ hunting = TraitProperty("Hunting Skill", trait_type="counter", base=10, mod=1, mult=2.0, min=0, max=100)
```
@@ -144,14 +144,14 @@
```python
> obj.traits.strength.value
-12 # base + mod
+18 # (base + mod) * mult
-> obj.traits.strength.base += 5
+> obj.traits.strength.base += 6
obj.traits.strength.value
-17
+27
> obj.traits.hp.value
-102 # base + mod
+102 # (base + mod) * mult
> obj.traits.hp.base -= 200
> obj.traits.hp.value
@@ -173,11 +173,11 @@
# with TraitProperties:
> obj.hunting.value
-12
+22
> obj.strength.value += 5
> obj.strength.value
-17
+32
```
@@ -213,25 +213,27 @@
```
## Static trait
-`value = base + mod`
+`value = (base + mod) * mult`
-The static trait has a `base` value and an optional `mod`-ifier. A typical use
-of a static trait would be a Strength stat or Skill value. That is, something
-that varies slowly or not at all, and which may be modified in-place.
+The static trait has a `base` value and an optional `mod`-ifier and `mult`-iplier.
+The modifier defaults to 0, and the multiplier to 1.0, for no change in value.
+A typical use of a static trait would be a Strength stat or Skill value. That is,
+something that varies slowly or not at all, and which may be modified in-place.
```python
> obj.traits.add("str", "Strength", trait_type="static", base=10, mod=2)
> obj.traits.mytrait.value
-
12 # base + mod
+
> obj.traits.mytrait.base += 2
> obj.traits.mytrait.mod += 1
> obj.traits.mytrait.value
15
> obj.traits.mytrait.mod = 0
+> obj.traits.mytrait.mult = 2.0
> obj.traits.mytrait.value
-12
+24
```
@@ -241,18 +243,20 @@
min/unset base base+mod max/unset
|--------------|--------|---------X--------X------------|
current value
- = current
- + mod
+ = (current
+ + mod)
+ * mult
A counter describes a value that can move from a base. The `.current` property
is the thing usually modified. It starts at the `.base`. One can also add a
-modifier, which will both be added to the base and to current (forming
-`.value`). The min/max of the range are optional, a boundary set to None will
-remove it. A suggested use for a Counter Trait would be to track skill values.
+modifier, which is added to both the base and to current. `.value` is then formed
+by multiplying by the multiplier, which defaults to 1.0 for no change. The min/max
+of the range are optional, a boundary set to None will remove it. A suggested use
+for a Counter Trait would be to track skill values.
```python
> obj.traits.add("hunting", "Hunting Skill", trait_type="counter",
- base=10, mod=1, min=0, max=100)
+ base=10, mod=1, mult=1.0, min=0, max=100)
> obj.traits.hunting.value
11 # current starts at base + mod
@@ -264,7 +268,10 @@
> del obj.traits.hunting.current
> obj.traits.hunting.value
11
+
> obj.traits.hunting.max = None # removing upper bound
+> obj.traits.hunting.mult = 100.0
+> obj.traits.hunting.value
+1100
# for TraitProperties, pass the args/kwargs of traits.add() to the
# TraitProperty constructor instead.
@@ -1183,20 +1190,20 @@
[docs]class StaticTrait(Trait):
"""
- Static Trait. This is a single value with a modifier,
- with no concept of a 'current' value or min/max etc.
+ Static Trait. This is a single value with a modifier,
+ multiplier, and no concept of a 'current' value or min/max etc.
- value = base + mod
+ value = (base + mod) * mult
"""
trait_type = "static"
- default_keys = {"base": 0, "mod": 0}
+ default_keys = {"base": 0, "mod": 0, "mult": 1.0}
def __str__(self):
status = "{value:11}".format(value=self.value)
- return "{name:12} {status} ({mod:+3})".format(name=self.name, status=status, mod=self.mod)
+ return "{name:12} {status} ({mod:+3}) (* {mult:.2f})".format(name=self.name, status=status, mod=self.mod, mult=self.mult)
# Helpers
@property
@@ -1220,10 +1227,24 @@
if type(amount) in (int, float):
self._data["mod"] = amount
+ @property
+ def mult(self):
+ """The trait's multiplier."""
+ return self._data["mult"]
+
+ @mult.setter
+ def mult(self, amount):
+ if type(amount) in (int, float):
+ self._data["mult"] = amount
+
+ @mult.deleter
+ def mult(self):
+ self._data["mult"] = 1.0
+
@property
def value(self):
- "The value of the Trait"
- return self.base + self.mod
+ "The value of the Trait."
+ return (self.base + self.mod) * self.mult
[docs]class CounterTrait(Trait):
@@ -1233,13 +1254,14 @@
This includes modifications and min/max limits as well as the notion of a
current value. The value can also be reset to the base value.
- min/unset base base+mod max/unset
+ min/unset base (base+mod)*mult max/unset
|--------------|--------|---------X--------X------------|
current value
- = current
- + mod
+ = (current
+ + mod)
+ * mult
- - value = current + mod, starts at base + mod
+ - value = (current + mod) * mult, starts at (base + mod) * mult
- if min or max is None, there is no upper/lower bound (default)
- if max is set to "base", max will be equal to base+mod
- descs are used to optionally describe each value interval.
@@ -1266,6 +1288,7 @@
default_keys = {
"base": 0,
"mod": 0,
+ "mult": 1.0,
"min": None,
"max": None,
"descs": None,
@@ -1341,16 +1364,16 @@
now = time()
tdiff = now - self._data["last_update"]
current += rate * tdiff
- value = current + self.mod
+ value = (current + self.mod)
# we must make sure so we don't overstep our bounds
# even if .mod is included
if self._passed_ratetarget(value):
- current = self._data["ratetarget"] - self.mod
+ current = (self._data["ratetarget"] - self.mod)
self._stop_timer()
elif not self._within_boundaries(value):
- current = self._enforce_boundaries(value) - self.mod
+ current = (self._enforce_boundaries(value) - self.mod)
self._stop_timer()
else:
self._data["last_update"] = now
@@ -1394,6 +1417,19 @@
value = self.max - self.base
self._data["mod"] = value
+ @property
+ def mult(self):
+ return self._data["mult"]
+
+ @mult.setter
+ def mult(self, amount):
+ if type(amount) in (int, float):
+ self._data["mult"] = amount
+
+ @mult.deleter
+ def mult(self):
+ self._data["mult"] = 1.0
+
@property
def min(self):
return self._data["min"]
@@ -1424,7 +1460,7 @@
@property
def current(self):
- """The `current` value of the `Trait`. This does not have .mod added."""
+ """The `current` value of the `Trait`. This does not have .mod added and is not .mult-iplied."""
return self._update_current(self._data.get("current", self.base))
@current.setter
@@ -1439,8 +1475,8 @@
@property
def value(self):
- "The value of the Trait (current + mod)"
- return self._enforce_boundaries(self.current + self.mod)
+ "The value of the Trait. (current + mod) * mult"
+ return self._enforce_boundaries((self.current + self.mod) * self.mult)
@property
def ratetarget(self):
@@ -1503,15 +1539,15 @@
"""
Gauge Trait.
- This emulates a gauge-meter that empties from a base+mod value.
+ This emulates a gauge-meter that empties from a (base+mod) * mult value.
- min/0 max=base+mod
+ min/0 max=(base+mod)*mult
|-----------------------X---------------------------|
value
= current
- min defaults to 0
- - max value is always base + mad
+ - max value is always (base + mod) * mult
- .max is an alias of .base
- value = current and varies from min to max.
- descs is a mapping {upper_bound_inclusive: desc}. These
@@ -1535,6 +1571,7 @@
default_keys = {
"base": 0,
"mod": 0,
+ "mult": 1.0,
"min": 0,
"descs": None,
"rate": 0,
@@ -1572,11 +1609,11 @@
"""Ensures that incoming value falls within trait's range."""
if self.min is not None and value <= self.min:
return self.min
- return min(self.mod + self.base, value)
+ return min((self.mod + self.base) * self.mult, value)
def __str__(self):
status = "{value:4} / {base:4}".format(value=self.value, base=self.base)
- return "{name:12} {status} ({mod:+3})".format(name=self.name, status=status, mod=self.mod)
+ return "{name:12} {status} ({mod:+3}) (* {mult:.2f})".format(name=self.name, status=status, mod=self.mod, mult=self.mult)
@property
def base(self):
@@ -1601,6 +1638,19 @@
if value + self.base < self.min:
value = self.min - self.base
self._data["mod"] = value
+
+ @property
+ def mult(self):
+ return self._data["mult"]
+
+ @mult.setter
+ def mult(self, amount):
+ if type(amount) in (int, float):
+ self._data["mult"] = amount
+
+ @mult.deleter
+ def mult(self):
+ self._data["mult"] = 1.0
@property
def min(self):
@@ -1609,16 +1659,16 @@
@min.setter
def min(self, value):
- """Limit so min can never be greater than base+mod."""
+ """Limit so min can never be greater than (base+mod)*mult."""
if value is None:
self._data["min"] = self.default_keys["min"]
elif type(value) in (int, float):
- self._data["min"] = min(value, self.base + self.mod)
+ self._data["min"] = min(value, (self.base + self.mod) * self.mult)
@property
def max(self):
- "The max is always base + mod."
- return self.base + self.mod
+ "The max is always (base + mod) * mult."
+ return (self.base + self.mod) * self.mult
@max.setter
def max(self, value):
@@ -1636,7 +1686,7 @@
def current(self):
"""The `current` value of the gauge."""
return self._update_current(
- self._enforce_boundaries(self._data.get("current", self.base + self.mod))
+ self._enforce_boundaries(self._data.get("current", (self.base + self.mod) * self.mult))
)
@current.setter
@@ -1647,7 +1697,7 @@
@current.deleter
def current(self):
"Resets current back to 'full'"
- self._data["current"] = self.base + self.mod
+ self._data["current"] = (self.base + self.mod) * self.mult
@property
def value(self):
diff --git a/docs/1.0-dev/_modules/evennia/locks/lockfuncs.html b/docs/1.0-dev/_modules/evennia/locks/lockfuncs.html
index 8aa364ccd1..7c0bb0cdcb 100644
--- a/docs/1.0-dev/_modules/evennia/locks/lockfuncs.html
+++ b/docs/1.0-dev/_modules/evennia/locks/lockfuncs.html
@@ -524,15 +524,19 @@
only when out of character.
"""
obj = accessed_obj.obj if hasattr(accessed_obj, "obj") else accessed_obj
- session = obj.session if hasattr(obj, "session") else obj
+ account = obj.account if hasattr(obj, "account") else obj
+ if not account:
+ return True
try:
- return not obj.get_puppet(session)
+ session = accessed_obj.session
except AttributeError:
- try:
- return not obj.account.get_puppet(session)
- except AttributeError:
- pass
- return False
+ session = account.sessions.get()[0] # NOTE: this doesn't work well
+ # for high multisession mode. We may need
+ # to change to sessiondb to resolve this
+ try:
+ return not account.get_puppet(session)
+ except TypeError:
+ return not session.get_puppet()
[docs]def objtag(accessing_obj, accessed_obj, *args, **kwargs):
"""
diff --git a/docs/1.0-dev/_modules/evennia/objects/manager.html b/docs/1.0-dev/_modules/evennia/objects/manager.html
index 16e2617ff8..f2248ca99a 100644
--- a/docs/1.0-dev/_modules/evennia/objects/manager.html
+++ b/docs/1.0-dev/_modules/evennia/objects/manager.html
@@ -518,13 +518,19 @@
# query - if so, strip it.
match = _MULTIMATCH_REGEX.match(str(searchdata))
match_number = None
+ stripped_searchdata = searchdata
if match:
# strips the number
- match_number, searchdata = match.group("number"), match.group("name")
+ match_number, stripped_searchdata = match.group("number"), match.group("name")
match_number = int(match_number) - 1
- if match_number is not None or not exact:
- # run search again, with the exactness set by call
- matches = _searcher(searchdata, candidates, typeclass, exact=exact)
+ if match_number is not None:
+ # run search against the stripped data
+ matches = _searcher(stripped_searchdata, candidates, typeclass, exact=True)
+ if not matches:
+ # final chance to get a looser match against the number-stripped query
+ matches = _searcher(stripped_searchdata, candidates, typeclass, exact=False)
+ elif not exact:
+ matches = _searcher(searchdata, candidates, typeclass, exact=False)
# deal with result
if len(matches) == 1 and match_number is not None and match_number != 0:
diff --git a/docs/1.0-dev/_modules/evennia/objects/objects.html b/docs/1.0-dev/_modules/evennia/objects/objects.html
index 7315663af0..96d5a87c31 100644
--- a/docs/1.0-dev/_modules/evennia/objects/objects.html
+++ b/docs/1.0-dev/_modules/evennia/objects/objects.html
@@ -2278,7 +2278,8 @@
# Will be formatted with the appropriate attributes.
lockstring = (
"puppet:id({character_id}) or pid({account_id}) or perm(Developer) or pperm(Developer);"
- "delete:id({account_id}) or perm(Admin)"
+ "delete:id({account_id}) or perm(Admin);"
+ "edit:pid({account_id}) or perm(Admin)"
)
[docs] @classmethod
diff --git a/docs/1.0-dev/_modules/evennia/typeclasses/attributes.html b/docs/1.0-dev/_modules/evennia/typeclasses/attributes.html
index 12c216749b..e4e8ac0eb8 100644
--- a/docs/1.0-dev/_modules/evennia/typeclasses/attributes.html
+++ b/docs/1.0-dev/_modules/evennia/typeclasses/attributes.html
@@ -707,13 +707,14 @@
self._set_cache(key, category, attr)
return attr
-[docs] def do_update_attribute(self, attr, value):
+[docs] def do_update_attribute(self, attr, value, strvalue):
"""
Simply sets a new Value to an Attribute.
Args:
attr (IAttribute): The Attribute being changed.
value (obj): The Value for the Attribute.
+ strvalue (bool): If True, `value` is expected to be a string.
"""
raise NotImplementedError()
@@ -815,15 +816,16 @@
self._delete_cache(attr.key, attr.category)
self.do_delete_attribute(attr)
-[docs] def update_attribute(self, attr, value):
+[docs] def update_attribute(self, attr, value, strattr=False):
"""
Simply updates an Attribute.
Args:
attr (IAttribute): The attribute to delete.
value (obj): The new value.
+ strattr (bool): If set, the `value` is a raw string.
"""
- self.do_update_attribute(attr, value)
+ self.do_update_attribute(attr, value, strattr)
[docs] def do_batch_delete(self, attribute_list):
"""
@@ -945,7 +947,7 @@
self._category_storage[category].append(new_attr)
return new_attr
-[docs] def do_update_attribute(self, attr, value):
+
[docs] def do_batch_update_attribute(self, attr_obj, category, lock_storage, new_value, strvalue):
@@ -1044,8 +1046,14 @@
self._set_cache(key, category, new_attr)
return new_attr
-
+[docs] def do_update_attribute(self, attr, value, strvalue):
+ if strvalue:
+ attr.value = None
+ attr.db_strvalue = value
+ else:
+ attr.value = value
+ attr.db_strvalue = None
+ attr.save(update_fields=["db_strvalue", "db_value"])
[docs] def do_batch_update_attribute(self, attr_obj, category, lock_storage, new_value, strvalue):
attr_obj.db_category = category
@@ -1245,7 +1253,7 @@
if attr_obj:
# update an existing attribute object
attr_obj = attr_obj[0]
- self.backend.update_attribute(attr_obj, value)
+ self.backend.update_attribute(attr_obj, value, strattr)
else:
# create a new Attribute (no OOB handlers can be notified)
self.backend.create_attribute(keystr, category, lockstring, value, strattr)
diff --git a/docs/1.0-dev/_modules/evennia/utils/ansi.html b/docs/1.0-dev/_modules/evennia/utils/ansi.html
index 9843ce7eff..ba6818ec57 100644
--- a/docs/1.0-dev/_modules/evennia/utils/ansi.html
+++ b/docs/1.0-dev/_modules/evennia/utils/ansi.html
@@ -583,6 +583,7 @@
string (str): The parsed string.
"""
+ string = string or ""
return parser.parse_ansi(string, strip_ansi=strip_ansi, xterm256=xterm256, mxp=mxp)
@@ -599,6 +600,7 @@
string (str): The stripped string.
"""
+ string = string or ""
return parser.parse_ansi(string, strip_ansi=True)
@@ -615,6 +617,7 @@
string (str): the stripped string.
"""
+ string = string or ""
return parser.strip_raw_codes(string)
@@ -632,6 +635,7 @@
Strip MXP markup.
"""
+ string = string or ""
return parser.strip_mxp(string)
@@ -644,6 +648,7 @@
string (str): The raw, escaped string.
"""
+ string = string or ""
return string.replace("{", "{{").replace("|", "||")
diff --git a/docs/1.0-dev/_modules/evennia/utils/text2html.html b/docs/1.0-dev/_modules/evennia/utils/text2html.html
index fd6436d5a5..3f103bdb95 100644
--- a/docs/1.0-dev/_modules/evennia/utils/text2html.html
+++ b/docs/1.0-dev/_modules/evennia/utils/text2html.html
@@ -390,6 +390,12 @@
"clean up invisible spaces"
return match.group(1) + " "
+[docs] def handle_single_first_space(self, text):
+ "Don't swallow an initial lone space"
+ if text.startswith(" "):
+ return " " + text[1:]
+ return text
+
[docs] def parse(self, text, strip_ansi=False):
"""
Main access function, converts a text containing ANSI codes
@@ -403,8 +409,7 @@
text (str): Parsed text.
"""
- # print(f"incoming ansi:\n{text}")
-
+ # print(f"incoming text:\n{text}")
# parse everything to ansi first
text = parse_ansi(text, strip_ansi=strip_ansi, xterm256=True, mxp=True)
# convert all ansi to html
@@ -422,6 +427,7 @@
result = self.convert_urls(result)
result = self.re_double_space(result)
result = self.re_invisible_space(result)
+ result = self.handle_single_first_space(result)
# clean out eventual ansi that was missed
## result = parse_ansi(result, strip_ansi=True)
diff --git a/docs/1.0-dev/_sources/Coding/Changelog.md.txt b/docs/1.0-dev/_sources/Coding/Changelog.md.txt
index 75b36c65c7..6268fa57ec 100644
--- a/docs/1.0-dev/_sources/Coding/Changelog.md.txt
+++ b/docs/1.0-dev/_sources/Coding/Changelog.md.txt
@@ -158,7 +158,8 @@ Up requirements to Django 4.0+, Twisted 22+, Python 3.9 or 3.10
- Have `type/force` default to `update`-mode rather than `reset`mode and add more verbose
warning when using reset mode.
- Attribute storage supports defaultdicts (Hendher)
-- Add `is_ooc` lockfunc (meant for limiting commands at the OOC level)
+- Add ObjectParent mixin to default game folder template as an easy, ready-made
+ way to override features on all ObjectDB-inheriting objects easily.
## Evennia 0.9.5
diff --git a/docs/1.0-dev/_sources/Components/Objects.md.txt b/docs/1.0-dev/_sources/Components/Objects.md.txt
index d30c41738c..7fb6583ee2 100644
--- a/docs/1.0-dev/_sources/Components/Objects.md.txt
+++ b/docs/1.0-dev/_sources/Components/Objects.md.txt
@@ -5,12 +5,32 @@ All in-game objects in Evennia, be it characters, chairs, monsters, rooms or han
represented by an Evennia *Object*. Objects form the core of Evennia and is probably what you'll
spend most time working with. Objects are [Typeclassed](./Typeclasses.md) entities.
+An Evennia Object is, by definition, a Python class that includes
+[evennia.objects.objects.DefaultObject](evennia.objects.objects.DefaultObject) among its
+parents. Evennia defines several subclasses of `DefaultObject`:
+
+- [evennia.objects.objects.DefaultCharacter](evennia.objects.objects.DefaultCharacter) -
+the normal in-game Character, controlled by a player.
+- [evennia.objects.objects.DefaultRoom](evennia.objects.objects.DefaultRoom) - a location in the game world.
+- [evennia.objects.objects.DefaultExit](evennia.objects.objects.DefaultExit) - an entity that (usually) sits
+in a room and represents a one-way connection to another location.
+
+You will usually not use the `Default*` parents themselves. In `mygame/typeclasses/` there are
+convenient subclasses to use. They are empty, and thus identical to
+the defaults. Tweaking them is one of the main ways to customize your game!
+
+- `mygame.typeclasses.objects.Object` (inherits from `DefaultObject`)
+- `mygame.typeclasses.characters.Character` (inherits from `DefaultCharacter`)
+- `mygame.typeclasses.rooms.Room` (inherits from `DefaultRoom`)
+- `mygame.typeclasses.exits.Exit` (inherits from `DefaultExit`)
+
## How to create your own object types
-An Evennia Object is, per definition, a Python class that includes `evennia.DefaultObject` among its
-parents. In `mygame/typeclasses/objects.py` there is already a class `Object` that inherits from
-`DefaultObject` and that you can inherit from. You can put your new typeclass directly in that
-module or you could organize your code in some other way. Here we assume we make a new module
+You can easily add your own in-game behavior by either modifying one of the typeclasses in
+your game dir or by inheriting from them.
+
+You can put your new typeclass directly in the relevant parent
+module, or you could organize your code in some other way. Here we assume we make a new module
`mygame/typeclasses/flowers.py`:
```python
@@ -29,32 +49,66 @@ module or you could organize your code in some other way. Here we assume we make
self.db.desc = "This is a pretty rose with thorns."
```
-You could save this in the `mygame/typeclasses/objects.py` (then you'd not need to import `Object`)
-or you can put it in a new module. Let's say we do the latter, making a module
-`typeclasses/flowers.py`. Now you just need to point to the class *Rose* with the `@create` command
+Now you just need to point to the class *Rose* with the `create` command
to make a new rose:
@create/drop MyRose:flowers.Rose
-What the `@create` command actually *does* is to use `evennia.create_object`. You can do the same
-thing yourself in code:
+What the `create` command actually *does* is to use the [evennia.create_object](evennia.utils.create.create_object)
+function. You can do the same thing yourself in code:
```python
from evennia import create_object
new_rose = create_object("typeclasses.flowers.Rose", key="MyRose")
```
-(The `@create` command will auto-append the most likely path to your typeclass, if you enter the
+(The `create` command will auto-append the most likely path to your typeclass, if you enter the
call manually you have to give the full path to the class. The `create.create_object` function is
powerful and should be used for all coded object creating (so this is what you use when defining
-your own building commands). Check out the `ev.create_*` functions for how to build other entities
-like [Scripts](./Scripts.md)).
+your own building commands).
This particular Rose class doesn't really do much; all it does is make sure the attribute
`desc` (which is what the `look` command looks for) is pre-set, which is pretty pointless since you
-will usually want to change this at build time (using the `@desc` command or using the
-[Spawner](./Prototypes.md)). The `Object` typeclass offers many more hooks that is available
-to use though - see next section.
+will usually want to change this at build time (using the `desc` command or using the
+[Spawner](./Prototypes.md)).
+
+## Adding common functionality
+
+`Object`, `Character`, `Room` and `Exit` also inherit from `mygame.typeclasses.objects.ObjectParent`.
+This is an empty 'mixin' class. Optionally, you can modify this class if you want to easily add some _common_ functionality to all
+your Objects, Characters, Rooms and Exits at once. You can still customize each subclass separately (see the Python
+docs on [multiple inheritance](https://docs.python.org/3/tutorial/classes.html#multiple-inheritance) for details).
+
+For example:
+
+```python
+# in mygame/typeclasses/objects.py
+# ...
+
+from evennia.objects.objects import DefaultObject
+
+class ObjectParent:
+ def at_pre_get(self, getter, **kwargs):
+ # make all entities by default un-pickable
+ return False
+
+class Object(ObjectParent, DefaultObject):
+ # replaces at_pre_get with its own
+ def at_pre_get(self, getter, **kwargs):
+ return True
+
+# each in their respective modules ...
+
+class Character(ObjectParent, DefaultCharacter):
+ # will inherit at_pre_get from ObjectParent
+ pass
+
+class Exit(ObjectParent, DefaultExit):
+ # Overrides and uses the DefaultExit version of at_pre_get instead
+ def at_pre_get(self, getter, **kwargs):
+ return DefaultExit.at_pre_get(self, getter, **kwargs)
+
+```
## Properties and functions on Objects
diff --git a/docs/1.0-dev/_sources/Contribs/Contrib-Cooldowns.md.txt b/docs/1.0-dev/_sources/Contribs/Contrib-Cooldowns.md.txt
index ada9765a46..7c00b51d64 100644
--- a/docs/1.0-dev/_sources/Contribs/Contrib-Cooldowns.md.txt
+++ b/docs/1.0-dev/_sources/Contribs/Contrib-Cooldowns.md.txt
@@ -30,7 +30,7 @@ customize this if desired by passing a different value for the `db_attribute`
parameter.
```python
-from evennia.contrib.game_systems.cooldowns import Cooldownhandler
+from evennia.contrib.game_systems.cooldowns import CooldownHandler
from evennia.utils.utils import lazy_property
@lazy_property
diff --git a/docs/1.0-dev/api/evennia.commands.default.building.html b/docs/1.0-dev/api/evennia.commands.default.building.html
index ff0e3a80cc..d206d58bb0 100644
--- a/docs/1.0-dev/api/evennia.commands.default.building.html
+++ b/docs/1.0-dev/api/evennia.commands.default.building.html
@@ -543,7 +543,7 @@ You can specify the /force switch to bypass this confirmation.
@@ -584,7 +584,7 @@ You can specify the /force switch to bypass this confirmation.
-
-
search_index_entry = {'aliases': '@del @delete', 'category': 'building', 'key': '@destroy', 'no_prefix': 'destroy del delete', 'tags': '', 'text': '\n permanently delete objects\n\n Usage:\n destroy[/switches] [obj, obj2, obj3, [dbref-dbref], ...]\n\n Switches:\n override - The destroy command will usually avoid accidentally\n destroying account objects. This switch overrides this safety.\n force - destroy without confirmation.\n Examples:\n destroy house, roof, door, 44-78\n destroy 5-10, flower, 45\n destroy/force north\n\n Destroys one or many objects. If dbrefs are used, a range to delete can be\n given, e.g. 4-10. Also the end points will be deleted. This command\n displays a confirmation before destroying, to make sure of your choice.\n You can specify the /force switch to bypass this confirmation.\n '}¶
+search_index_entry = {'aliases': '@delete @del', 'category': 'building', 'key': '@destroy', 'no_prefix': 'destroy delete del', 'tags': '', 'text': '\n permanently delete objects\n\n Usage:\n destroy[/switches] [obj, obj2, obj3, [dbref-dbref], ...]\n\n Switches:\n override - The destroy command will usually avoid accidentally\n destroying account objects. This switch overrides this safety.\n force - destroy without confirmation.\n Examples:\n destroy house, roof, door, 44-78\n destroy 5-10, flower, 45\n destroy/force north\n\n Destroys one or many objects. If dbrefs are used, a range to delete can be\n given, e.g. 4-10. Also the end points will be deleted. This command\n displays a confirmation before destroying, to make sure of your choice.\n You can specify the /force switch to bypass this confirmation.\n '}¶
@@ -1296,7 +1296,7 @@ server settings.
-
-
aliases = ['@typeclasses', '@swap', '@update', '@type', '@parent']¶
+aliases = ['@type', '@swap', '@parent', '@typeclasses', '@update']¶
@@ -1327,7 +1327,7 @@ server settings.
-
-
search_index_entry = {'aliases': '@typeclasses @swap @update @type @parent', 'category': 'building', 'key': '@typeclass', 'no_prefix': 'typeclass typeclasses swap update type parent', 'tags': '', 'text': "\n set or change an object's typeclass\n\n Usage:\n typeclass[/switch] <object> [= typeclass.path]\n typeclass/prototype <object> = prototype_key\n\n typeclasses or typeclass/list/show [typeclass.path]\n swap - this is a shorthand for using /force/reset flags.\n update - this is a shorthand for using the /force/reload flag.\n\n Switch:\n show, examine - display the current typeclass of object (default) or, if\n given a typeclass path, show the docstring of that typeclass.\n update - *only* re-run at_object_creation on this object\n meaning locks or other properties set later may remain.\n reset - clean out *all* the attributes and properties on the\n object - basically making this a new clean object. This will also\n reset cmdsets!\n force - change to the typeclass also if the object\n already has a typeclass of the same name.\n list - show available typeclasses. Only typeclasses in modules actually\n imported or used from somewhere in the code will show up here\n (those typeclasses are still available if you know the path)\n prototype - clean and overwrite the object with the specified\n prototype key - effectively making a whole new object.\n\n Example:\n type button = examples.red_button.RedButton\n type/prototype button=a red button\n\n If the typeclass_path is not given, the current object's typeclass is\n assumed.\n\n View or set an object's typeclass. If setting, the creation hooks of the\n new typeclass will be run on the object. If you have clashing properties on\n the old class, use /reset. 
By default you are protected from changing to a\n typeclass of the same name as the one you already have - use /force to\n override this protection.\n\n The given typeclass must be identified by its location using python\n dot-notation pointing to the correct module and class. If no typeclass is\n given (or a wrong typeclass is given). Errors in the path or new typeclass\n will lead to the old typeclass being kept. The location of the typeclass\n module is searched from the default typeclass directory, as defined in the\n server settings.\n\n "}¶
+search_index_entry = {'aliases': '@type @swap @parent @typeclasses @update', 'category': 'building', 'key': '@typeclass', 'no_prefix': 'typeclass type swap parent typeclasses update', 'tags': '', 'text': "\n set or change an object's typeclass\n\n Usage:\n typeclass[/switch] <object> [= typeclass.path]\n typeclass/prototype <object> = prototype_key\n\n typeclasses or typeclass/list/show [typeclass.path]\n swap - this is a shorthand for using /force/reset flags.\n update - this is a shorthand for using the /force/reload flag.\n\n Switch:\n show, examine - display the current typeclass of object (default) or, if\n given a typeclass path, show the docstring of that typeclass.\n update - *only* re-run at_object_creation on this object\n meaning locks or other properties set later may remain.\n reset - clean out *all* the attributes and properties on the\n object - basically making this a new clean object. This will also\n reset cmdsets!\n force - change to the typeclass also if the object\n already has a typeclass of the same name.\n list - show available typeclasses. Only typeclasses in modules actually\n imported or used from somewhere in the code will show up here\n (those typeclasses are still available if you know the path)\n prototype - clean and overwrite the object with the specified\n prototype key - effectively making a whole new object.\n\n Example:\n type button = examples.red_button.RedButton\n type/prototype button=a red button\n\n If the typeclass_path is not given, the current object's typeclass is\n assumed.\n\n View or set an object's typeclass. If setting, the creation hooks of the\n new typeclass will be run on the object. If you have clashing properties on\n the old class, use /reset. 
By default you are protected from changing to a\n typeclass of the same name as the one you already have - use /force to\n override this protection.\n\n The given typeclass must be identified by its location using python\n dot-notation pointing to the correct module and class. If no typeclass is\n given (or a wrong typeclass is given). Errors in the path or new typeclass\n will lead to the old typeclass being kept. The location of the typeclass\n module is searched from the default typeclass directory, as defined in the\n server settings.\n\n "}¶
@@ -1784,7 +1784,7 @@ one is given.
@@ -1815,7 +1815,7 @@ one is given.
-
-
search_index_entry = {'aliases': '@locate @search', 'category': 'building', 'key': '@find', 'no_prefix': 'find locate search', 'tags': '', 'text': '\n search the database for objects\n\n Usage:\n find[/switches] <name or dbref or *account> [= dbrefmin[-dbrefmax]]\n locate - this is a shorthand for using the /loc switch.\n\n Switches:\n room - only look for rooms (location=None)\n exit - only look for exits (destination!=None)\n char - only look for characters (BASE_CHARACTER_TYPECLASS)\n exact - only exact matches are returned.\n loc - display object location if exists and match has one result\n startswith - search for names starting with the string, rather than containing\n\n Searches the database for an object of a particular name or exact #dbref.\n Use *accountname to search for an account. The switches allows for\n limiting object matches to certain game entities. Dbrefmin and dbrefmax\n limits matches to within the given dbrefs range, or above/below if only\n one is given.\n '}¶
+search_index_entry = {'aliases': '@search @locate', 'category': 'building', 'key': '@find', 'no_prefix': 'find search locate', 'tags': '', 'text': '\n search the database for objects\n\n Usage:\n find[/switches] <name or dbref or *account> [= dbrefmin[-dbrefmax]]\n locate - this is a shorthand for using the /loc switch.\n\n Switches:\n room - only look for rooms (location=None)\n exit - only look for exits (destination!=None)\n char - only look for characters (BASE_CHARACTER_TYPECLASS)\n exact - only exact matches are returned.\n loc - display object location if exists and match has one result\n startswith - search for names starting with the string, rather than containing\n\n Searches the database for an object of a particular name or exact #dbref.\n Use *accountname to search for an account. The switches allows for\n limiting object matches to certain game entities. Dbrefmin and dbrefmax\n limits matches to within the given dbrefs range, or above/below if only\n one is given.\n '}¶
diff --git a/docs/1.0-dev/api/evennia.commands.default.general.html b/docs/1.0-dev/api/evennia.commands.default.general.html
index 55f5deb07e..7d98da9c0a 100644
--- a/docs/1.0-dev/api/evennia.commands.default.general.html
+++ b/docs/1.0-dev/api/evennia.commands.default.general.html
@@ -274,7 +274,7 @@ inv
@@ -305,7 +305,7 @@ inv
-
-
search_index_entry = {'aliases': 'inv i', 'category': 'general', 'key': 'inventory', 'no_prefix': ' inv i', 'tags': '', 'text': '\n view inventory\n\n Usage:\n inventory\n inv\n\n Shows your inventory.\n '}¶
+search_index_entry = {'aliases': 'i inv', 'category': 'general', 'key': 'inventory', 'no_prefix': ' i inv', 'tags': '', 'text': '\n view inventory\n\n Usage:\n inventory\n inv\n\n Shows your inventory.\n '}¶
@@ -549,7 +549,7 @@ placing it in their inventory.
@@ -580,7 +580,7 @@ placing it in their inventory.
-
-
search_index_entry = {'aliases': '" \'', 'category': 'general', 'key': 'say', 'no_prefix': ' " \'', 'tags': '', 'text': '\n speak as your character\n\n Usage:\n say <message>\n\n Talk to those in your current location.\n '}¶
+search_index_entry = {'aliases': '\' "', 'category': 'general', 'key': 'say', 'no_prefix': ' \' "', 'tags': '', 'text': '\n speak as your character\n\n Usage:\n say <message>\n\n Talk to those in your current location.\n '}¶
@@ -660,7 +660,7 @@ automatically begin with your name.
@@ -701,7 +701,7 @@ space.
-
-
search_index_entry = {'aliases': ': emote', 'category': 'general', 'key': 'pose', 'no_prefix': ' : emote', 'tags': '', 'text': "\n strike a pose\n\n Usage:\n pose <pose text>\n pose's <pose text>\n\n Example:\n pose is standing by the wall, smiling.\n -> others will see:\n Tom is standing by the wall, smiling.\n\n Describe an action being taken. The pose text will\n automatically begin with your name.\n "}¶
+search_index_entry = {'aliases': 'emote :', 'category': 'general', 'key': 'pose', 'no_prefix': ' emote :', 'tags': '', 'text': "\n strike a pose\n\n Usage:\n pose <pose text>\n pose's <pose text>\n\n Example:\n pose is standing by the wall, smiling.\n -> others will see:\n Tom is standing by the wall, smiling.\n\n Describe an action being taken. The pose text will\n automatically begin with your name.\n "}¶
@@ -724,7 +724,7 @@ which permission groups you are a member of.
@@ -755,7 +755,7 @@ which permission groups you are a member of.
-
-
search_index_entry = {'aliases': 'groups hierarchy', 'category': 'general', 'key': 'access', 'no_prefix': ' groups hierarchy', 'tags': '', 'text': '\n show your current game access\n\n Usage:\n access\n\n This command shows you the permission hierarchy and\n which permission groups you are a member of.\n '}¶
+search_index_entry = {'aliases': 'hierarchy groups', 'category': 'general', 'key': 'access', 'no_prefix': ' hierarchy groups', 'tags': '', 'text': '\n show your current game access\n\n Usage:\n access\n\n This command shows you the permission hierarchy and\n which permission groups you are a member of.\n '}¶
diff --git a/docs/1.0-dev/api/evennia.commands.default.tests.html b/docs/1.0-dev/api/evennia.commands.default.tests.html
index 0434a08c5f..27d0347610 100644
--- a/docs/1.0-dev/api/evennia.commands.default.tests.html
+++ b/docs/1.0-dev/api/evennia.commands.default.tests.html
@@ -747,7 +747,7 @@ main test suite started with
Test the batch processor.
+red_button = <module 'evennia.contrib.tutorials.red_button.red_button' from '/tmp/tmpklgbha88/479c259dabeddbedca09e5141d1c15aaee3ae951/evennia/contrib/tutorials/red_button/red_button.py'>¶
diff --git a/docs/1.0-dev/api/evennia.commands.default.unloggedin.html b/docs/1.0-dev/api/evennia.commands.default.unloggedin.html
index 737db5b96a..85c2529e90 100644
--- a/docs/1.0-dev/api/evennia.commands.default.unloggedin.html
+++ b/docs/1.0-dev/api/evennia.commands.default.unloggedin.html
@@ -73,7 +73,7 @@ connect “account name” “pass word”
@@ -108,7 +108,7 @@ there is no object yet before the account has logged in)
-
-
search_index_entry = {'aliases': 'conn con co', 'category': 'general', 'key': 'connect', 'no_prefix': ' conn con co', 'tags': '', 'text': '\n connect to the game\n\n Usage (at login screen):\n connect accountname password\n connect "account name" "pass word"\n\n Use the create command to first create an account before logging in.\n\n If you have spaces in your name, enclose it in double quotes.\n '}¶
+search_index_entry = {'aliases': 'con conn co', 'category': 'general', 'key': 'connect', 'no_prefix': ' con conn co', 'tags': '', 'text': '\n connect to the game\n\n Usage (at login screen):\n connect accountname password\n connect "account name" "pass word"\n\n Use the create command to first create an account before logging in.\n\n If you have spaces in your name, enclose it in double quotes.\n '}¶
@@ -132,7 +132,7 @@ create “account name” “pass word”
@@ -163,7 +163,7 @@ create “account name” “pass word”
-
-
search_index_entry = {'aliases': 'cr cre', 'category': 'general', 'key': 'create', 'no_prefix': ' cr cre', 'tags': '', 'text': '\n create a new account account\n\n Usage (at login screen):\n create <accountname> <password>\n create "account name" "pass word"\n\n This creates a new account account.\n\n If you have spaces in your name, enclose it in double quotes.\n '}¶
+search_index_entry = {'aliases': 'cre cr', 'category': 'general', 'key': 'create', 'no_prefix': ' cre cr', 'tags': '', 'text': '\n create a new account account\n\n Usage (at login screen):\n create <accountname> <password>\n create "account name" "pass word"\n\n This creates a new account account.\n\n If you have spaces in your name, enclose it in double quotes.\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.base_systems.email_login.email_login.html b/docs/1.0-dev/api/evennia.contrib.base_systems.email_login.email_login.html
index e12d5de533..e6a3a24d7a 100644
--- a/docs/1.0-dev/api/evennia.contrib.base_systems.email_login.email_login.html
+++ b/docs/1.0-dev/api/evennia.contrib.base_systems.email_login.email_login.html
@@ -90,7 +90,7 @@ the module given by settings.CONNECTION_SCREEN_MODULE.
@@ -120,7 +120,7 @@ there is no object yet before the account has logged in)
-
-
search_index_entry = {'aliases': 'conn con co', 'category': 'general', 'key': 'connect', 'no_prefix': ' conn con co', 'tags': '', 'text': '\n Connect to the game.\n\n Usage (at login screen):\n connect <email> <password>\n\n Use the create command to first create an account before logging in.\n '}¶
+search_index_entry = {'aliases': 'con conn co', 'category': 'general', 'key': 'connect', 'no_prefix': ' con conn co', 'tags': '', 'text': '\n Connect to the game.\n\n Usage (at login screen):\n connect <email> <password>\n\n Use the create command to first create an account before logging in.\n '}¶
@@ -142,7 +142,7 @@ there is no object yet before the account has logged in)
@@ -178,7 +178,7 @@ name enclosed in quotes:
-
-
search_index_entry = {'aliases': 'cr cre', 'category': 'general', 'key': 'create', 'no_prefix': ' cr cre', 'tags': '', 'text': '\n Create a new account.\n\n Usage (at login screen):\n create "accountname" <email> <password>\n\n This creates a new account account.\n\n '}¶
+search_index_entry = {'aliases': 'cre cr', 'category': 'general', 'key': 'create', 'no_prefix': ' cre cr', 'tags': '', 'text': '\n Create a new account.\n\n Usage (at login screen):\n create "accountname" <email> <password>\n\n This creates a new account account.\n\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.base_systems.ingame_python.commands.html b/docs/1.0-dev/api/evennia.contrib.base_systems.ingame_python.commands.html
index 9a3c68aeb8..49d4c999aa 100644
--- a/docs/1.0-dev/api/evennia.contrib.base_systems.ingame_python.commands.html
+++ b/docs/1.0-dev/api/evennia.contrib.base_systems.ingame_python.commands.html
@@ -67,7 +67,7 @@
@@ -148,7 +148,7 @@ on user permission.
-
-
search_index_entry = {'aliases': '@callback @callbacks @calls', 'category': 'building', 'key': '@call', 'no_prefix': 'call callback callbacks calls', 'tags': '', 'text': '\n Command to edit callbacks.\n '}¶
+search_index_entry = {'aliases': '@calls @callback @callbacks', 'category': 'building', 'key': '@call', 'no_prefix': 'call calls callback callbacks', 'tags': '', 'text': '\n Command to edit callbacks.\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.base_systems.mux_comms_cmds.mux_comms_cmds.html b/docs/1.0-dev/api/evennia.contrib.base_systems.mux_comms_cmds.mux_comms_cmds.html
index 9a40c4b31b..a32382d911 100644
--- a/docs/1.0-dev/api/evennia.contrib.base_systems.mux_comms_cmds.mux_comms_cmds.html
+++ b/docs/1.0-dev/api/evennia.contrib.base_systems.mux_comms_cmds.mux_comms_cmds.html
@@ -111,7 +111,7 @@ aliases to an already joined channel.
@@ -142,7 +142,7 @@ aliases to an already joined channel.
-
-
search_index_entry = {'aliases': 'aliaschan chanalias', 'category': 'comms', 'key': 'addcom', 'no_prefix': ' aliaschan chanalias', 'tags': '', 'text': '\n Add a channel alias and/or subscribe to a channel\n\n Usage:\n addcom [alias=] <channel>\n\n Joins a given channel. If alias is given, this will allow you to\n refer to the channel by this alias rather than the full channel\n name. Subsequent calls of this command can be used to add multiple\n aliases to an already joined channel.\n '}¶
+search_index_entry = {'aliases': 'chanalias aliaschan', 'category': 'comms', 'key': 'addcom', 'no_prefix': ' chanalias aliaschan', 'tags': '', 'text': '\n Add a channel alias and/or subscribe to a channel\n\n Usage:\n addcom [alias=] <channel>\n\n Joins a given channel. If alias is given, this will allow you to\n refer to the channel by this alias rather than the full channel\n name. Subsequent calls of this command can be used to add multiple\n aliases to an already joined channel.\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.full_systems.evscaperoom.commands.html b/docs/1.0-dev/api/evennia.contrib.full_systems.evscaperoom.commands.html
index b4820045c5..735e75de16 100644
--- a/docs/1.0-dev/api/evennia.contrib.full_systems.evscaperoom.commands.html
+++ b/docs/1.0-dev/api/evennia.contrib.full_systems.evscaperoom.commands.html
@@ -162,7 +162,7 @@ the operation will be general or on the room.
@@ -186,7 +186,7 @@ set in self.parse())
-
-
search_index_entry = {'aliases': 'q quit abort chicken out', 'category': 'evscaperoom', 'key': 'give up', 'no_prefix': ' q quit abort chicken out', 'tags': '', 'text': '\n Give up\n\n Usage:\n give up\n\n Abandons your attempts at escaping and of ever winning the pie-eating contest.\n\n '}¶
+search_index_entry = {'aliases': 'quit chicken out q abort', 'category': 'evscaperoom', 'key': 'give up', 'no_prefix': ' quit chicken out q abort', 'tags': '', 'text': '\n Give up\n\n Usage:\n give up\n\n Abandons your attempts at escaping and of ever winning the pie-eating contest.\n\n '}¶
@@ -322,7 +322,7 @@ shout
@@ -351,7 +351,7 @@ set in self.parse())
-
-
search_index_entry = {'aliases': 'whisper shout ;', 'category': 'general', 'key': 'say', 'no_prefix': ' whisper shout ;', 'tags': '', 'text': '\n Perform an communication action.\n\n Usage:\n say <text>\n whisper\n shout\n\n '}¶
+search_index_entry = {'aliases': '; whisper shout', 'category': 'general', 'key': 'say', 'no_prefix': ' ; whisper shout', 'tags': '', 'text': '\n Perform an communication action.\n\n Usage:\n say <text>\n whisper\n shout\n\n '}¶
@@ -441,7 +441,7 @@ looks and what actions is available.
@@ -470,7 +470,7 @@ set in self.parse())
-
-
search_index_entry = {'aliases': 'examine ex e unfocus', 'category': 'evscaperoom', 'key': 'focus', 'no_prefix': ' examine ex e unfocus', 'tags': '', 'text': '\n Focus your attention on a target.\n\n Usage:\n focus <obj>\n\n Once focusing on an object, use look to get more information about how it\n looks and what actions is available.\n\n '}¶
+search_index_entry = {'aliases': 'ex e unfocus examine', 'category': 'evscaperoom', 'key': 'focus', 'no_prefix': ' ex e unfocus examine', 'tags': '', 'text': '\n Focus your attention on a target.\n\n Usage:\n focus <obj>\n\n Once focusing on an object, use look to get more information about how it\n looks and what actions is available.\n\n '}¶
@@ -532,7 +532,7 @@ set in self.parse())
@@ -556,7 +556,7 @@ set in self.parse())
-
-
search_index_entry = {'aliases': 'inv inventory i give', 'category': 'evscaperoom', 'key': 'get', 'no_prefix': ' inv inventory i give', 'tags': '', 'text': '\n Use focus / examine instead.\n\n '}¶
+search_index_entry = {'aliases': 'i inventory give inv', 'category': 'evscaperoom', 'key': 'get', 'no_prefix': ' i inventory give inv', 'tags': '', 'text': '\n Use focus / examine instead.\n\n '}¶
@@ -577,7 +577,7 @@ set in self.parse())
@@ -600,7 +600,7 @@ to all the variables defined therein.
-
-
search_index_entry = {'aliases': '@dig @open', 'category': 'general', 'key': 'open', 'no_prefix': ' dig open', 'tags': '', 'text': '\n Interact with an object in focus.\n\n Usage:\n <action> [arg]\n\n '}¶
+search_index_entry = {'aliases': '@open @dig', 'category': 'general', 'key': 'open', 'no_prefix': ' open dig', 'tags': '', 'text': '\n Interact with an object in focus.\n\n Usage:\n <action> [arg]\n\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.game_systems.clothing.clothing.html b/docs/1.0-dev/api/evennia.contrib.game_systems.clothing.clothing.html
index 3304dc8c6d..735d68fcf6 100644
--- a/docs/1.0-dev/api/evennia.contrib.game_systems.clothing.clothing.html
+++ b/docs/1.0-dev/api/evennia.contrib.game_systems.clothing.clothing.html
@@ -643,7 +643,7 @@ inv
@@ -674,7 +674,7 @@ inv
-
-
search_index_entry = {'aliases': 'inv i', 'category': 'general', 'key': 'inventory', 'no_prefix': ' inv i', 'tags': '', 'text': '\n view inventory\n\n Usage:\n inventory\n inv\n\n Shows your inventory.\n '}¶
+search_index_entry = {'aliases': 'i inv', 'category': 'general', 'key': 'inventory', 'no_prefix': ' i inv', 'tags': '', 'text': '\n view inventory\n\n Usage:\n inventory\n inv\n\n Shows your inventory.\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.rpg.rpsystem.rpsystem.html b/docs/1.0-dev/api/evennia.contrib.rpg.rpsystem.rpsystem.html
index 92a1269bf9..d4a16c37c2 100644
--- a/docs/1.0-dev/api/evennia.contrib.rpg.rpsystem.rpsystem.html
+++ b/docs/1.0-dev/api/evennia.contrib.rpg.rpsystem.rpsystem.html
@@ -712,7 +712,7 @@ a different language.
@@ -743,7 +743,7 @@ a different language.
-
-
search_index_entry = {'aliases': '" \'', 'category': 'general', 'key': 'say', 'no_prefix': ' " \'', 'tags': '', 'text': '\n speak as your character\n\n Usage:\n say <message>\n\n Talk to those in your current location.\n '}¶
+search_index_entry = {'aliases': '\' "', 'category': 'general', 'key': 'say', 'no_prefix': ' \' "', 'tags': '', 'text': '\n speak as your character\n\n Usage:\n say <message>\n\n Talk to those in your current location.\n '}¶
@@ -882,7 +882,7 @@ Using the command without arguments will list all current recogs.
@@ -909,7 +909,7 @@ Using the command without arguments will list all current recogs.
-
-
search_index_entry = {'aliases': 'forget recognize', 'category': 'general', 'key': 'recog', 'no_prefix': ' forget recognize', 'tags': '', 'text': '\n Recognize another person in the same room.\n\n Usage:\n recog\n recog sdesc as alias\n forget alias\n\n Example:\n recog tall man as Griatch\n forget griatch\n\n This will assign a personal alias for a person, or forget said alias.\n Using the command without arguments will list all current recogs.\n\n '}¶
+search_index_entry = {'aliases': 'recognize forget', 'category': 'general', 'key': 'recog', 'no_prefix': ' recognize forget', 'tags': '', 'text': '\n Recognize another person in the same room.\n\n Usage:\n recog\n recog sdesc as alias\n forget alias\n\n Example:\n recog tall man as Griatch\n forget griatch\n\n This will assign a personal alias for a person, or forget said alias.\n Using the command without arguments will list all current recogs.\n\n '}¶
diff --git a/docs/1.0-dev/api/evennia.contrib.rpg.traits.tests.html b/docs/1.0-dev/api/evennia.contrib.rpg.traits.tests.html
index c9f412aca3..0532a46d8c 100644
--- a/docs/1.0-dev/api/evennia.contrib.rpg.traits.tests.html
+++ b/docs/1.0-dev/api/evennia.contrib.rpg.traits.tests.html
@@ -175,7 +175,7 @@
@@ -205,7 +205,7 @@
-
test_value()[source]¶
-value is current + mod, where current defaults to base
+value is (current + mod) * mult, where current defaults to base
diff --git a/docs/1.0-dev/api/evennia.contrib.rpg.traits.traits.html b/docs/1.0-dev/api/evennia.contrib.rpg.traits.traits.html
index 985759386c..b9b36728d4 100644
--- a/docs/1.0-dev/api/evennia.contrib.rpg.traits.traits.html
+++ b/docs/1.0-dev/api/evennia.contrib.rpg.traits.traits.html
@@ -91,7 +91,7 @@ define it explicitly. You can combine both styles if you like.
def at_object_creation(self):
# (or wherever you want)
- self.traits.add("str", "Strength", trait_type="static", base=10, mod=2)
+ self.traits.add("str", "Strength", trait_type="static", base=10, mod=2, mult=2.0)
self.traits.add("hp", "Health", trait_type="gauge", min=0, max=100)
self.traits.add("hunting", "Hunting Skill", trait_type="counter",
base=10, mod=1, min=0, max=100)
@@ -117,9 +117,9 @@ other properties/methods on your class.
class Object(DefaultObject):
...
- strength = TraitProperty("Strength", trait_type="static", base=10, mod=2)
+ strength = TraitProperty("Strength", trait_type="static", base=10, mod=2, mult=1.5)
health = TraitProperty("Health", trait_type="gauge", min=0, base=100, mod=2)
- hunting = TraitProperty("Hunting Skill", trait_type="counter", base=10, mod=1, min=0, max=100)
+ hunting = TraitProperty("Hunting Skill", trait_type="counter", base=10, mod=1, mult=2.0, min=0, max=100)
> Note that the property-name will become the name of the trait and you don’t supply trait_key
@@ -141,14 +141,14 @@ manipulate it directly (if you try, it will just remain unchanged). The
on combining fields, like .base and .mod - which fields are available and how they relate to
each other depends on the trait type.
> obj.traits.strength.value
-12 # base + mod
+18 # (base + mod) * mult
-> obj.traits.strength.base += 5
+> obj.traits.strength.base += 6
obj.traits.strength.value
-17
+27
> obj.traits.hp.value
-102 # base + mod
+102 # (base + mod) * mult
> obj.traits.hp.base -= 200
> obj.traits.hp.value
@@ -170,11 +170,11 @@ each other depends on the trait type.
# with TraitProperties:
> obj.hunting.value
-12
+22
> obj.strength.value += 5
> obj.strength.value
-17
+32
value = base + mod
-The static trait has a base value and an optional mod-ifier. A typical use -of a static trait would be a Strength stat or Skill value. That is, something -that varies slowly or not at all, and which may be modified in-place.
+value = (base + mod) * mult
+The static trait has a base value and an optional mod-ifier and ‘mult’-iplier. +The modifier defaults to 0, and the multiplier to 1.0, for no change in value. +A typical use of a static trait would be a Strength stat or Skill value. That is, +somethingthat varies slowly or not at all, and which may be modified in-place.
> obj.traits.add("str", "Strength", trait_type="static", base=10, mod=2)
> obj.traits.mytrait.value
-
12 # base + mod
+
> obj.traits.mytrait.base += 2
> obj.traits.mytrait.mod += 1
> obj.traits.mytrait.value
15
> obj.traits.mytrait.mod = 0
+> obj.traits.mytrait.mult = 2.0
> obj.traits.mytrait.value
-12
+20
min/unset base base+mod max/unset
|--------------|--------|---------X--------X------------|
current value
- = current
- + mod
+ = (current
+ + mod)
+ * mult
A counter describes a value that can move from a base. The .current property is the thing usually modified. It starts at the .base. One can also add a -modifier, which will both be added to the base and to current (forming -.value). The min/max of the range are optional, a boundary set to None will -remove it. A suggested use for a Counter Trait would be to track skill values.
+modifier, which is added to both the base and to current. ‘.value’ is then formed +by multiplying by the multiplier, which defaults to 1.0 for no change. The min/max +of the range are optional, a boundary set to None will remove it. A suggested use +for a Counter Trait would be to track skill values.> obj.traits.add("hunting", "Hunting Skill", trait_type="counter",
- base=10, mod=1, min=0, max=100)
+ base=10, mod=1, mult=1.0, min=0, max=100)
> obj.traits.hunting.value
11 # current starts at base + mod
@@ -249,7 +253,10 @@ remove it. A suggested use for a Counter Trait would be to track skill values.
> del obj.traits.hunting.current
> obj.traits.hunting.value
11
+
> obj.traits.hunting.max = None # removing upper bound
+> obj.traits.hunting.mult = 100.0
+1100
# for TraitProperties, pass the args/kwargs of traits.add() to the
# TraitProperty constructor instead.
@@ -711,8 +718,8 @@ initialization of this trait.
class evennia.contrib.rpg.traits.traits.StaticTrait(trait_data)[source]¶
Bases: evennia.contrib.rpg.traits.traits.Trait
Static Trait. This is a single value with a modifier, -with no concept of a ‘current’ value or min/max etc.
-value = base + mod
+multiplier, and no concept of a ‘current’ value or min/max etc. +value = (base + mod) * mult
trait_type = 'static'¶The trait’s modifier.
mult¶The trait’s multiplier.
+value¶The value of the Trait
+The value of the Trait.
This includes modifications and min/max limits as well as the notion of a current value. The value can also be reset to the base value.
= current -+ mod
+= (current ++ mod) +* mult
value = current + mod, starts at base + mod
value = (current + mod) * mult, starts at (base + mod) * mult
if min or max is None, there is no upper/lower bound (default)
if max is set to “base”, max will be equal ot base+mod
descs are used to optionally describe each value interval. @@ -792,7 +806,7 @@ ratetarget. Setting the rate to 0 (default) stops any change.
default_keys = {'base': 0, 'descs': None, 'max': None, 'min': None, 'mod': 0, 'rate': 0, 'ratetarget': None}¶default_keys = {'base': 0, 'descs': None, 'max': None, 'min': None, 'mod': 0, 'mult': 1.0, 'rate': 0, 'ratetarget': None}¶
mod¶
mult¶min¶current¶The current value of the Trait. This does not have .mod added.
+The current value of the Trait. This does not have .mod added and is not .mult-iplied.
value¶The value of the Trait (current + mod)
+The value of the Trait. (current + mod) * mult
evennia.contrib.rpg.traits.traits.GaugeTrait(trait_data)[source]¶
Bases: evennia.contrib.rpg.traits.traits.CounterTrait
Gauge Trait.
-This emulates a gauge-meter that empties from a base+mod value.
+This emulates a gauge-meter that empties from a (base+mod) * mult value.
@@ -907,7 +926,7 @@ describe the interval.value
min defaults to 0
max value is always base + mad
max value is always (base + mod) * mult
.max is an alias of .base
value = current and varies from min to max.
default_keys = {'base': 0, 'descs': None, 'min': 0, 'mod': 0, 'rate': 0, 'ratetarget': None}¶default_keys = {'base': 0, 'descs': None, 'min': 0, 'mod': 0, 'mult': 1.0, 'rate': 0, 'ratetarget': None}¶
mod¶
mult¶min¶max¶The max is always base + mod.
+The max is always (base + mod) * mult.
aliases = ['press button', 'press', 'push']¶
search_index_entry = {'aliases': 'press button press push', 'category': 'general', 'key': 'push button', 'no_prefix': ' press button press push', 'tags': '', 'text': '\n Push the red button (lid closed)\n\n Usage:\n push button\n\n '}¶
aliases = ['smash lid', 'smash', 'break lid']¶
search_index_entry = {'aliases': 'smash lid smash break lid', 'category': 'general', 'key': 'smash glass', 'no_prefix': ' smash lid smash break lid', 'tags': '', 'text': '\n Smash the protective glass.\n\n Usage:\n smash glass\n\n Try to smash the glass of the button.\n\n '}¶
aliases = ['press button', 'press', 'push']¶
search_index_entry = {'aliases': 'press button press push', 'category': 'general', 'key': 'push button', 'no_prefix': ' press button press push', 'tags': '', 'text': '\n Push the red button\n\n Usage:\n push button\n\n '}¶
aliases = ['feel', 'listen', 'l', 'ex', 'get', 'examine']¶
search_index_entry = {'aliases': 'feel listen l ex get examine', 'category': 'general', 'key': 'look', 'no_prefix': ' feel listen l ex get examine', 'tags': '', 'text': "\n Looking around in darkness\n\n Usage:\n look <obj>\n\n ... not that there's much to see in the dark.\n\n "}¶
search_index_entry = {'aliases': 'light burn', 'category': 'tutorialworld', 'key': 'on', 'no_prefix': ' light burn', 'tags': '', 'text': '\n Creates light where there was none. Something to burn.\n '}¶search_index_entry = {'aliases': 'burn light', 'category': 'tutorialworld', 'key': 'on', 'no_prefix': ' burn light', 'tags': '', 'text': '\n Creates light where there was none. Something to burn.\n '}¶
search_index_entry = {'aliases': 'pull push shiftroot move', 'category': 'tutorialworld', 'key': 'shift', 'no_prefix': ' pull push shiftroot move', 'tags': '', 'text': '\n Shifts roots around.\n\n Usage:\n shift blue root left/right\n shift red root left/right\n shift yellow root up/down\n shift green root up/down\n\n '}¶search_index_entry = {'aliases': 'pull move shiftroot push', 'category': 'tutorialworld', 'key': 'shift', 'no_prefix': ' pull move shiftroot push', 'tags': '', 'text': '\n Shifts roots around.\n\n Usage:\n shift blue root left/right\n shift red root left/right\n shift yellow root up/down\n shift green root up/down\n\n '}¶
aliases = ['press button', 'push button', 'button']¶aliases = ['press button', 'button', 'push button']¶
search_index_entry = {'aliases': 'press button push button button', 'category': 'tutorialworld', 'key': 'press', 'no_prefix': ' press button push button button', 'tags': '', 'text': '\n Presses a button.\n '}¶search_index_entry = {'aliases': 'press button button push button', 'category': 'tutorialworld', 'key': 'press', 'no_prefix': ' press button button push button', 'tags': '', 'text': '\n Presses a button.\n '}¶
aliases = ['fight', 'bash', 'slash', 'stab', 'thrust', 'parry', 'pierce', 'chop', 'kill', 'defend', 'hit']¶aliases = ['fight', 'defend', 'slash', 'pierce', 'parry', 'stab', 'kill', 'thrust', 'hit', 'bash', 'chop']¶
search_index_entry = {'aliases': 'fight bash slash stab thrust parry pierce chop kill defend hit', 'category': 'tutorialworld', 'key': 'attack', 'no_prefix': ' fight bash slash stab thrust parry pierce chop kill defend hit', 'tags': '', 'text': '\n Attack the enemy. Commands:\n\n stab <enemy>\n slash <enemy>\n parry\n\n stab - (thrust) makes a lot of damage but is harder to hit with.\n slash - is easier to land, but does not make as much damage.\n parry - forgoes your attack but will make you harder to hit on next\n enemy attack.\n\n '}¶search_index_entry = {'aliases': 'fight defend slash pierce parry stab kill thrust hit bash chop', 'category': 'tutorialworld', 'key': 'attack', 'no_prefix': ' fight defend slash pierce parry stab kill thrust hit bash chop', 'tags': '', 'text': '\n Attack the enemy. Commands:\n\n stab <enemy>\n slash <enemy>\n parry\n\n stab - (thrust) makes a lot of damage but is harder to hit with.\n slash - is easier to land, but does not make as much damage.\n parry - forgoes your attack but will make you harder to hit on next\n enemy attack.\n\n '}¶
aliases = ['l', 'feel around', 'feel', 'search', 'fiddle']¶aliases = ['feel', 'feel around', 'l', 'fiddle', 'search']¶
search_index_entry = {'aliases': 'l feel around feel search fiddle', 'category': 'tutorialworld', 'key': 'look', 'no_prefix': ' l feel around feel search fiddle', 'tags': '', 'text': '\n Look around in darkness\n\n Usage:\n look\n\n Look around in the darkness, trying\n to find something.\n '}¶search_index_entry = {'aliases': 'feel feel around l fiddle search', 'category': 'tutorialworld', 'key': 'look', 'no_prefix': ' feel feel around l fiddle search', 'tags': '', 'text': '\n Look around in darkness\n\n Usage:\n look\n\n Look around in the darkness, trying\n to find something.\n '}¶
lockstring = 'puppet:id({character_id}) or pid({account_id}) or perm(Developer) or pperm(Developer);delete:id({account_id}) or perm(Admin)'¶lockstring = 'puppet:id({character_id}) or pid({account_id}) or perm(Developer) or pperm(Developer);delete:id({account_id}) or perm(Admin);edit:pid({account_id}) or perm(Admin)'¶
do_update_attribute(attr, value)[source]¶do_update_attribute(attr, value, strvalue)[source]¶
Simply sets a new Value to an Attribute.
attr (IAttribute) – The Attribute being changed.
value (obj) – The Value for the Attribute.
strvalue (bool) – If True, value is expected to be a string.
update_attribute(attr, value)[source]¶update_attribute(attr, value, strattr=False)[source]¶
Simply updates an Attribute.
attr (IAttribute) – The attribute to delete.
value (obj) – The new value.
strattr (bool) – If set, the value is a raw string.
do_update_attribute(attr, value)[source]¶do_update_attribute(attr, value, strvalue)[source]¶
Simply sets a new Value to an Attribute.
attr (IAttribute) – The Attribute being changed.
value (obj) – The Value for the Attribute.
strvalue (bool) – If True, value is expected to be a string.
do_update_attribute(attr, value)[source]¶do_update_attribute(attr, value, strvalue)[source]¶
Simply sets a new Value to an Attribute.
attr (IAttribute) – The Attribute being changed.
value (obj) – The Value for the Attribute.
strvalue (bool) – If True, value is expected to be a string.
aliases = [':x', '::', ':fi', ':>', ':::', ':j', ':dw', ':S', ':', ':u', ':<', ':f', ':w', ':=', ':r', ':!', ':DD', ':A', ':echo', ':s', ':UU', ':wq', ':h', ':fd', ':i', ':q', ':p', ':I', ':uu', ':y', ':q!', ':dd']¶aliases = [':i', ':fi', ':DD', ':S', ':!', ':j', ':=', ':fd', ':dd', ':y', ':p', ':f', '::', ':s', ':uu', ':', ':w', ':echo', ':q', ':::', ':h', ':dw', ':<', ':u', ':r', ':I', ':UU', ':>', ':q!', ':x', ':wq', ':A']¶
search_index_entry = {'aliases': ':x :: :fi :> ::: :j :dw :S : :u :< :f :w := :r :! :DD :A :echo :s :UU :wq :h :fd :i :q :p :I :uu :y :q! :dd', 'category': 'general', 'key': ':editor_command_group', 'no_prefix': ' :x :: :fi :> ::: :j :dw :S : :u :< :f :w := :r :! :DD :A :echo :s :UU :wq :h :fd :i :q :p :I :uu :y :q! :dd', 'tags': '', 'text': '\n Commands for the editor\n '}¶search_index_entry = {'aliases': ':i :fi :DD :S :! :j := :fd :dd :y :p :f :: :s :uu : :w :echo :q ::: :h :dw :< :u :r :I :UU :> :q! :x :wq :A', 'category': 'general', 'key': ':editor_command_group', 'no_prefix': ' :i :fi :DD :S :! :j := :fd :dd :y :p :f :: :s :uu : :w :echo :q ::: :h :dw :< :u :r :I :UU :> :q! :x :wq :A', 'tags': '', 'text': '\n Commands for the editor\n '}¶
aliases = ['n', 'yes', '__nomatch_command', 'y', 'a', 'no', 'abort']¶
search_index_entry = {'aliases': 'n yes __nomatch_command y a no abort', 'category': 'general', 'key': '__noinput_command', 'no_prefix': ' n yes __nomatch_command y a no abort', 'tags': '', 'text': '\n Handle a prompt for yes or no. Press [return] for the default choice.\n\n '}¶
aliases = ['previous', 'top', 'end', 'quit', 'a', 'e', 'q', 'next', 'abort', 'p', 't', 'n']¶aliases = ['t', 'quit', 'n', 'e', 'p', 'next', 'previous', 'top', 'a', 'q', 'end', 'abort']¶
search_index_entry = {'aliases': 'previous top end quit a e q next abort p t n', 'category': 'general', 'key': '__noinput_command', 'no_prefix': ' previous top end quit a e q next abort p t n', 'tags': '', 'text': '\n Manipulate the text paging. Catch no-input with aliases.\n '}¶search_index_entry = {'aliases': 't quit n e p next previous top a q end abort', 'category': 'general', 'key': '__noinput_command', 'no_prefix': ' t quit n e p next previous top a q end abort', 'tags': '', 'text': '\n Manipulate the text paging. Catch no-input with aliases.\n '}¶
clean up invisible spaces
parse(text, strip_ansi=False)[source]¶