From 3980a8fe69ef8d72d3f43a529f177a2e4f59a13d Mon Sep 17 00:00:00 2001 From: trhr Date: Thu, 20 Feb 2020 23:44:18 -0600 Subject: [PATCH 1/7] Boto3 / AWS contrib plugin --- evennia/contrib/aws-s3-cdn.py | 847 ++++++++++++++++++++++++++++++++++ 1 file changed, 847 insertions(+) create mode 100644 evennia/contrib/aws-s3-cdn.py diff --git a/evennia/contrib/aws-s3-cdn.py b/evennia/contrib/aws-s3-cdn.py new file mode 100644 index 0000000000..dbbdf59063 --- /dev/null +++ b/evennia/contrib/aws-s3-cdn.py @@ -0,0 +1,847 @@ +""" +This plugin migrates the Web-based portion of Evennia, +namely images, javascript, and other items located +inside staticfiles into Amazon AWS (S3) for hosting. + +INSTALLATION: + +1) If you don't have an AWS S3 account, you should create one now. + +Credentials required are an AWS IAM Access Key and Secret Key, +which can be generated/found in the AWS Console. + +Example IAM Control Policy Permissions, if desired: + +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "evennia", + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObjectAcl", + "s3:GetObject", + "s3:ListBucket", + "s3:DeleteObject", + "s3:PutObjectAcl" + ], + "Resource": [ + "arn:aws:s3:::YOUR_BUCKET_NAME/*", + "arn:aws:s3:::YOUR_BUCKET_NAME" + ] + }, + { + "Sid": "evenniaCreateBucket", + "Effect": "Allow", + "Action": [ + "s3:CreateBucket" + ], + "Resource": [ + "arn:aws:s3:::*" + ] + } + ] +} + +Advanced Users: The second IAM statement, CreateBucket, is only needed +for initial installation. You can remove it later, or you can +create the bucket and set the ACL yourself before you continue. + +2) This package requires the dependency "boto3," the official +AWS python package. You can install it with 'pip install boto3' +while inside your evennia virtual environment (or, simply +in your shell if you don't use a virtual environment). 
+ +3) Customize the variables defined below in secret_settings.py, +then run 'evennia stop', 'evennia start', 'evennia collectstatic' + +AWS_ACCESS_KEY_ID = 'EUHUB20BU08AEU7' # CHANGE ME! +AWS_SECRET_ACCESS_KEY = 'a/uoexauodabuq4j;kmw;kvka0d2' # CHANGE ME! +AWS_STORAGE_BUCKET_NAME = 'mygame-evennia' # CHANGE ME! +AWS_S3_REGION_NAME = 'us-east-1' # N. Virginia +AWS_S3_OBJECT_PARAMETERS = { 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', 'CacheControl': 'max-age=94608000', } +AWS_DEFAULT_ACL = 'public-read' +AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % settings.AWS_BUCKET_NAME +AWS_AUTO_CREATE_BUCKET = True +STATICFILES_STORAGE = 'evennia.contrib.aws-s3-cdn.S3Boto3Storage' +You may also store these as environment variables of the same name. + +UNINSTALLATION: + +If you haven't made changes to your static files (uploaded images, etc), +you can simply remove the lines you added to secret_settings.py. If you +have made changes and want to install at a later date, you can export +your files from your S3 bucket and put them in /static/ in the evennia +directory. + +LICENSE: + +aws-s3-cdn contrib is (c) 2020, trhr and released under BSD 3-Clause +License except where this license conflicts with the Evennia license. +Thank you to github.com/jschneier for contributions on django/boto3 classes. + +BSD 3-Clause License + +Copyright (c) 2008 - 2020, See AUTHORS file. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" + +from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, SuspiciousFileOperation + +try: + from django.conf import settings as ev_settings + if not ev_settings.AWS_ACCESS_KEY_ID or not ev_settings.AWS_SECRET_ACCESS_KEY or not ev_settings.AWS_STORAGE_BUCKET_NAME or not ev_settings.AWS_S3_REGION_NAME: + raise ImproperlyConfigured("You must add AWS-specific settings to mygame/server/conf/secret_settings.py to use this plugin.") + + if 'mygame-evennia' == ev_settings.AWS_STORAGE_BUCKET_NAME: + raise ImproperlyConfigured("You must customize your AWS_STORAGE_BUCKET_NAME in mygame/server/conf/secret_settings.py; it must be unique among ALL other S3 users") + +except Exception as e: + print(e) + +import io +import mimetypes +import os +import posixpath +import threading +import warnings +from gzip import GzipFile +from tempfile import SpooledTemporaryFile +from django.core.files.base import File +from django.core.files.storage import Storage +from django.utils.deconstruct import deconstructible +from 
django.utils.encoding import (filepath_to_uri, force_bytes, force_text, smart_text) +from django.utils.timezone import is_naive, make_naive + +try: + from django.utils.six.moves.urllib import parse as urlparse +except ImportError: + from urllib import parse as urlparse + +try: + import boto3.session + from boto3 import __version__ as boto3_version + from botocore.client import Config + from botocore.exceptions import ClientError +except ImportError as e: + raise ImproperlyConfigured("Could not load Boto3's S3 bindings. %s Did you run 'pip install boto3?'" % e) + +boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) + + +def setting(name, default=None): + """ + Helper function to get a Django setting by name. If setting doesn't exists + it will return a default. + :param name: Name of setting + :type name: str + :param default: Value if setting is unfound + :returns: Setting's value + """ + return getattr(ev_settings, name, default) + + +def clean_name(name): + """ + Cleans the name so that Windows style paths work + """ + # Normalize Windows style paths + clean_name = posixpath.normpath(name).replace('\\', '/') + + # os.path.normpath() can strip trailing slashes so we implement + # a workaround here. + if name.endswith('/') and not clean_name.endswith('/'): + # Add a trailing slash as it was stripped. + clean_name = clean_name + '/' + + # Given an empty string, os.path.normpath() will return ., which we don't want + if clean_name == '.': + clean_name = '' + + return clean_name + + +def safe_join(base, *paths): + """ + A version of django.utils._os.safe_join for S3 paths. + Joins one or more path components to the base path component + intelligently. Returns a normalized version of the final path. + The final path must be located inside of the base path component + (otherwise a ValueError is raised). + Paths outside the base path indicate a possible security + sensitive operation. 
+ """ + base_path = force_text(base) + base_path = base_path.rstrip('/') + paths = [force_text(p) for p in paths] + + final_path = base_path + '/' + for path in paths: + _final_path = posixpath.normpath(posixpath.join(final_path, path)) + # posixpath.normpath() strips the trailing /. Add it back. + if path.endswith('/') or _final_path + '/' == final_path: + _final_path += '/' + final_path = _final_path + if final_path == base_path: + final_path += '/' + + # Ensure final_path starts with base_path and that the next character after + # the base path is /. + base_path_len = len(base_path) + if (not final_path.startswith(base_path) or final_path[base_path_len] != '/'): + raise ValueError('the joined path is located outside of the base path' + ' component') + + return final_path.lstrip('/') + + +def check_location(storage): + if storage.location.startswith('/'): + correct = storage.location.lstrip('/') + raise ImproperlyConfigured( + "{}.location cannot begin with a leading slash. Found '{}'. Use '{}' instead.".format( + storage.__class__.__name__, + storage.location, + correct, + ) + ) + + +def lookup_env(names): + """ + Look up for names in environment. Returns the first element + found. + """ + for name in names: + value = os.environ.get(name) + if value: + return value + + +def get_available_overwrite_name(name, max_length): + if max_length is None or len(name) <= max_length: + return name + + # Adapted from Django + dir_name, file_name = os.path.split(name) + file_root, file_ext = os.path.splitext(file_name) + truncation = len(name) - max_length + + file_root = file_root[:-truncation] + if not file_root: + raise SuspiciousFileOperation( + 'aws-s3-cdn tried to truncate away entire filename "%s". ' + 'Please make sure that the corresponding file field ' + 'allows sufficient "max_length".' 
% name + ) + return os.path.join(dir_name, "{}{}".format(file_root, file_ext)) + + +@deconstructible +class S3Boto3StorageFile(File): + + """ + The default file object used by the S3Boto3Storage backend. + This file implements file streaming using boto's multipart + uploading functionality. The file can be opened in read or + write mode. + This class extends Django's File class. However, the contained + data is only the data contained in the current buffer. So you + should not access the contained file object directly. You should + access the data via this class. + Warning: This file *must* be closed using the close() method in + order to properly write the file to S3. Be sure to close the file + in your application. + """ + buffer_size = setting('AWS_S3_FILE_BUFFER_SIZE', 5242880) + + def __init__(self, name, mode, storage, buffer_size=None): + if 'r' in mode and 'w' in mode: + raise ValueError("Can't combine 'r' and 'w' in mode.") + self._storage = storage + self.name = name[len(self._storage.location):].lstrip('/') + self._mode = mode + self._force_mode = (lambda b: b) if 'b' in mode else force_text + self.obj = storage.bucket.Object(storage._encode_name(name)) + if 'w' not in mode: + # Force early RAII-style exception if object does not exist + self.obj.load() + self._is_dirty = False + self._raw_bytes_written = 0 + self._file = None + self._multipart = None + # 5 MB is the minimum part size (if there is more than one part). + # Amazon allows up to 10,000 parts. The default supports uploads + # up to roughly 50 GB. Increase the part size to accommodate + # for files larger than this. 
+ if buffer_size is not None: + self.buffer_size = buffer_size + self._write_counter = 0 + + @property + def size(self): + return self.obj.content_length + + def _get_file(self): + if self._file is None: + self._file = SpooledTemporaryFile( + max_size=self._storage.max_memory_size, + suffix=".S3Boto3StorageFile", + dir=setting("FILE_UPLOAD_TEMP_DIR") + ) + if 'r' in self._mode: + self._is_dirty = False + self.obj.download_fileobj(self._file) + self._file.seek(0) + if self._storage.gzip and self.obj.content_encoding == 'gzip': + self._file = GzipFile(mode=self._mode, fileobj=self._file, mtime=0.0) + return self._file + + def _set_file(self, value): + self._file = value + + file = property(_get_file, _set_file) + + def read(self, *args, **kwargs): + if 'r' not in self._mode: + raise AttributeError("File was not opened in read mode.") + return self._force_mode(super(S3Boto3StorageFile, self).read(*args, **kwargs)) + + def readline(self, *args, **kwargs): + if 'r' not in self._mode: + raise AttributeError("File was not opened in read mode.") + return self._force_mode(super(S3Boto3StorageFile, self).readline(*args, **kwargs)) + + def write(self, content): + if 'w' not in self._mode: + raise AttributeError("File was not opened in write mode.") + self._is_dirty = True + if self._multipart is None: + self._multipart = self.obj.initiate_multipart_upload( + **self._storage._get_write_parameters(self.obj.key) + ) + if self.buffer_size <= self._buffer_file_size: + self._flush_write_buffer() + bstr = force_bytes(content) + self._raw_bytes_written += len(bstr) + return super(S3Boto3StorageFile, self).write(bstr) + + @property + def _buffer_file_size(self): + pos = self.file.tell() + self.file.seek(0, os.SEEK_END) + length = self.file.tell() + self.file.seek(pos) + return length + + def _flush_write_buffer(self): + """ + Flushes the write buffer. 
+ """ + if self._buffer_file_size: + self._write_counter += 1 + self.file.seek(0) + part = self._multipart.Part(self._write_counter) + part.upload(Body=self.file.read()) + self.file.seek(0) + self.file.truncate() + + def _create_empty_on_close(self): + """ + Attempt to create an empty file for this key when this File is closed if no bytes + have been written and no object already exists on S3 for this key. + This behavior is meant to mimic the behavior of Django's builtin FileSystemStorage, + where files are always created after they are opened in write mode: + f = storage.open("file.txt", mode="w") + f.close() + """ + assert "w" in self._mode + assert self._raw_bytes_written == 0 + + try: + # Check if the object exists on the server; if so, don't do anything + self.obj.load() + except ClientError as err: + if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: + self.obj.put( + Body=b"", **self._storage._get_write_parameters(self.obj.key) + ) + else: + raise + + def close(self): + if self._is_dirty: + self._flush_write_buffer() + # TODO: Possibly cache the part ids as they're being uploaded + # instead of requesting parts from server. For now, emulating + # s3boto's behavior. + parts = [{'ETag': part.e_tag, 'PartNumber': part.part_number} + for part in self._multipart.parts.all()] + self._multipart.complete( + MultipartUpload={'Parts': parts}) + else: + if self._multipart is not None: + self._multipart.abort() + if 'w' in self._mode and self._raw_bytes_written == 0: + self._create_empty_on_close() + if self._file is not None: + self._file.close() + self._file = None + + +@deconstructible +class S3Boto3Storage(Storage): + """ + Amazon Simple Storage Service using Boto3 + This storage backend supports opening files in read or write + mode and supports streaming(buffering) data in chunks to S3 + when writing. 
+ """ + default_content_type = 'application/octet-stream' + # If config provided in init, signature_version and addressing_style settings/args are ignored. + config = None + + # used for looking up the access and secret key from env vars + access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'] + secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY'] + security_token_names = ['AWS_SESSION_TOKEN', 'AWS_SECURITY_TOKEN'] + security_token = None + + access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID')) + secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY')) + file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True) + object_parameters = setting('AWS_S3_OBJECT_PARAMETERS', {}) + bucket_name = setting('AWS_STORAGE_BUCKET_NAME') + auto_create_bucket = setting('AWS_AUTO_CREATE_BUCKET', False) + default_acl = setting('AWS_DEFAULT_ACL', 'public-read') + bucket_acl = setting('AWS_BUCKET_ACL', default_acl) + querystring_auth = setting('AWS_QUERYSTRING_AUTH', True) + querystring_expire = setting('AWS_QUERYSTRING_EXPIRE', 3600) + signature_version = setting('AWS_S3_SIGNATURE_VERSION') + reduced_redundancy = setting('AWS_REDUCED_REDUNDANCY', False) + location = setting('AWS_LOCATION', '') + encryption = setting('AWS_S3_ENCRYPTION', False) + custom_domain = setting('AWS_S3_CUSTOM_DOMAIN') + addressing_style = setting('AWS_S3_ADDRESSING_STYLE') + secure_urls = setting('AWS_S3_SECURE_URLS', True) + file_name_charset = setting('AWS_S3_FILE_NAME_CHARSET', 'utf-8') + gzip = setting('AWS_IS_GZIPPED', False) + preload_metadata = setting('AWS_PRELOAD_METADATA', False) + gzip_content_types = setting('GZIP_CONTENT_TYPES', ( + 'text/css', + 'text/javascript', + 'application/javascript', + 'application/x-javascript', + 'image/svg+xml', + )) + url_protocol = setting('AWS_S3_URL_PROTOCOL', 'http:') + endpoint_url = setting('AWS_S3_ENDPOINT_URL') + proxies = setting('AWS_S3_PROXIES') + region_name = 
setting('AWS_S3_REGION_NAME') + use_ssl = setting('AWS_S3_USE_SSL', True) + verify = setting('AWS_S3_VERIFY', None) + max_memory_size = setting('AWS_S3_MAX_MEMORY_SIZE', 0) + + def __init__(self, acl=None, bucket=None, **settings): + # check if some of the settings we've provided as class attributes + # need to be overwritten with values passed in here + for name, value in settings.items(): + if hasattr(self, name): + setattr(self, name, value) + + check_location(self) + + # Backward-compatibility: given the anteriority of the SECURE_URL setting + # we fall back to https if specified in order to avoid the construction + # of unsecure urls. + if self.secure_urls: + self.url_protocol = 'https:' + + self._entries = {} + self._bucket = None + self._connections = threading.local() + + self.access_key, self.secret_key = self._get_access_keys() + self.security_token = self._get_security_token() + + if not self.config: + kwargs = dict( + s3={'addressing_style': self.addressing_style}, + signature_version=self.signature_version, + ) + + if boto3_version_info >= (1, 4, 4): + kwargs['proxies'] = self.proxies + else: + warnings.warn( + "In version 1.10 of django-storages the minimum required version of " + "boto3 will be 1.4.4. 
You have %s " % boto3_version_info + ) + self.config = Config(**kwargs) + + + def __getstate__(self): + state = self.__dict__.copy() + state.pop('_connections', None) + state.pop('_bucket', None) + return state + + def __setstate__(self, state): + state['_connections'] = threading.local() + state['_bucket'] = None + self.__dict__ = state + + @property + def connection(self): + connection = getattr(self._connections, 'connection', None) + if connection is None: + session = boto3.session.Session() + self._connections.connection = session.resource( + 's3', + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + aws_session_token=self.security_token, + region_name=self.region_name, + use_ssl=self.use_ssl, + endpoint_url=self.endpoint_url, + config=self.config, + verify=self.verify, + ) + return self._connections.connection + + @property + def bucket(self): + """ + Get the current bucket. If there is no current bucket object + create it. + """ + if self._bucket is None: + self._bucket = self._get_or_create_bucket(self.bucket_name) + return self._bucket + + @property + def entries(self): + """ + Get the locally cached files for the bucket. + """ + if self.preload_metadata and not self._entries: + self._entries = { + self._decode_name(entry.key): entry + for entry in self.bucket.objects.filter(Prefix=self.location) + } + return self._entries + + def _get_access_keys(self): + """ + Gets the access keys to use when accessing S3. If none is + provided in the settings then get them from the environment + variables. + """ + access_key = self.access_key or lookup_env(S3Boto3Storage.access_key_names) + secret_key = self.secret_key or lookup_env(S3Boto3Storage.secret_key_names) + return access_key, secret_key + + def _get_security_token(self): + """ + Gets the security token to use when accessing S3. Get it from + the environment variables. 
+ """ + security_token = self.security_token or lookup_env(S3Boto3Storage.security_token_names) + return security_token + + def _get_or_create_bucket(self, name): + """ + Retrieves a bucket if it exists, otherwise creates it. + """ + bucket = self.connection.Bucket(name) + if self.auto_create_bucket: + try: + # Directly call head_bucket instead of bucket.load() because head_bucket() + # fails on wrong region, while bucket.load() does not. + bucket.meta.client.head_bucket(Bucket=name) + except ClientError as err: + if err.response['ResponseMetadata']['HTTPStatusCode'] == 301: + raise ImproperlyConfigured("Bucket %s exists, but in a different " + "region than we are connecting to. Set " + "the region to connect to by setting " + "AWS_S3_REGION_NAME to the correct region." % name) + + elif err.response['ResponseMetadata']['HTTPStatusCode'] == 404: + # Notes: When using the us-east-1 Standard endpoint, you can create + # buckets in other regions. The same is not true when hitting region specific + # endpoints. However, when you create the bucket not in the same region, the + # connection will fail all future requests to the Bucket after the creation + # (301 Moved Permanently). + # + # For simplicity, we enforce in S3Boto3Storage that any auto-created + # bucket must match the region that the connection is for. + # + # Also note that Amazon specifically disallows "us-east-1" when passing bucket + # region names; LocationConstraint *must* be blank to create in US Standard. 
+ + if self.bucket_acl: + bucket_params = {'ACL': self.bucket_acl} + else: + bucket_params = {} + region_name = self.connection.meta.client.meta.region_name + if region_name != 'us-east-1': + bucket_params['CreateBucketConfiguration'] = { + 'LocationConstraint': region_name} + bucket.create(**bucket_params) + else: + raise + return bucket + + def _clean_name(self, name): + """ + Cleans the name so that Windows style paths work + """ + # Normalize Windows style paths + clean_name = posixpath.normpath(name).replace('\\', '/') + + # os.path.normpath() can strip trailing slashes so we implement + # a workaround here. + if name.endswith('/') and not clean_name.endswith('/'): + # Add a trailing slash as it was stripped. + clean_name += '/' + return clean_name + + def _normalize_name(self, name): + """ + Normalizes the name so that paths like /path/to/ignored/../something.txt + work. We check to make sure that the path pointed to is not outside + the directory specified by the LOCATION setting. + """ + try: + return safe_join(self.location, name) + except ValueError: + raise SuspiciousOperation("Attempted access to '%s' denied." 
% + name) + + def _encode_name(self, name): + return smart_text(name, encoding=self.file_name_charset) + + def _decode_name(self, name): + return force_text(name, encoding=self.file_name_charset) + + def _compress_content(self, content): + """Gzip a given string content.""" + content.seek(0) + zbuf = io.BytesIO() + # The GZIP header has a modification time attribute (see http://www.zlib.org/rfc-gzip.html) + # This means each time a file is compressed it changes even if the other contents don't change + # For S3 this defeats detection of changes using MD5 sums on gzipped files + # Fixing the mtime at 0.0 at compression time avoids this problem + zfile = GzipFile(mode='wb', fileobj=zbuf, mtime=0.0) + try: + zfile.write(force_bytes(content.read())) + finally: + zfile.close() + zbuf.seek(0) + # Boto 2 returned the InMemoryUploadedFile with the file pointer replaced, + # but Boto 3 seems to have issues with that. No need for fp.name in Boto3 + # so just returning the BytesIO directly + return zbuf + + def _open(self, name, mode='rb'): + name = self._normalize_name(self._clean_name(name)) + try: + f = S3Boto3StorageFile(name, mode, self) + except ClientError as err: + if err.response['ResponseMetadata']['HTTPStatusCode'] == 404: + raise IOError('File does not exist: %s' % name) + raise # Let it bubble up if it was some other error + return f + + def _save(self, name, content): + cleaned_name = self._clean_name(name) + name = self._normalize_name(cleaned_name) + params = self._get_write_parameters(name, content) + + if (self.gzip and + params['ContentType'] in self.gzip_content_types and + 'ContentEncoding' not in params): + content = self._compress_content(content) + params['ContentEncoding'] = 'gzip' + + encoded_name = self._encode_name(name) + obj = self.bucket.Object(encoded_name) + if self.preload_metadata: + self._entries[encoded_name] = obj + + content.seek(0, os.SEEK_SET) + obj.upload_fileobj(content, ExtraArgs=params) + return cleaned_name + + def delete(self, 
name): + name = self._normalize_name(self._clean_name(name)) + self.bucket.Object(self._encode_name(name)).delete() + + if name in self._entries: + del self._entries[name] + + def exists(self, name): + name = self._normalize_name(self._clean_name(name)) + if self.entries: + return name in self.entries + try: + self.connection.meta.client.head_object(Bucket=self.bucket_name, Key=name) + return True + except ClientError: + return False + + def listdir(self, name): + path = self._normalize_name(self._clean_name(name)) + # The path needs to end with a slash, but if the root is empty, leave + # it. + if path and not path.endswith('/'): + path += '/' + + directories = [] + files = [] + paginator = self.connection.meta.client.get_paginator('list_objects') + pages = paginator.paginate(Bucket=self.bucket_name, Delimiter='/', Prefix=path) + for page in pages: + for entry in page.get('CommonPrefixes', ()): + directories.append(posixpath.relpath(entry['Prefix'], path)) + for entry in page.get('Contents', ()): + files.append(posixpath.relpath(entry['Key'], path)) + return directories, files + + def size(self, name): + name = self._normalize_name(self._clean_name(name)) + if self.entries: + entry = self.entries.get(name) + if entry: + return entry.size if hasattr(entry, 'size') else entry.content_length + return 0 + return self.bucket.Object(self._encode_name(name)).content_length + + def _get_write_parameters(self, name, content=None): + params = {} + + if self.encryption: + params['ServerSideEncryption'] = 'AES256' + if self.reduced_redundancy: + params['StorageClass'] = 'REDUCED_REDUNDANCY' + if self.default_acl: + params['ACL'] = self.default_acl + + _type, encoding = mimetypes.guess_type(name) + content_type = getattr(content, 'content_type', None) + content_type = content_type or _type or self.default_content_type + + params['ContentType'] = content_type + if encoding: + params['ContentEncoding'] = encoding + + params.update(self.get_object_parameters(name)) + return 
params + + def get_object_parameters(self, name): + """ + Returns a dictionary that is passed to file upload. Override this + method to adjust this on a per-object basis to set e.g ContentDisposition. + By default, returns the value of AWS_S3_OBJECT_PARAMETERS. + Setting ContentEncoding will prevent objects from being automatically gzipped. + """ + return self.object_parameters.copy() + + def get_modified_time(self, name): + """ + Returns an (aware) datetime object containing the last modified time if + USE_TZ is True, otherwise returns a naive datetime in the local timezone. + """ + name = self._normalize_name(self._clean_name(name)) + entry = self.entries.get(name) + # only call self.bucket.Object() if the key is not found + # in the preloaded metadata. + if entry is None: + entry = self.bucket.Object(self._encode_name(name)) + if setting('USE_TZ'): + # boto3 returns TZ aware timestamps + return entry.last_modified + else: + return make_naive(entry.last_modified) + + def modified_time(self, name): + """Returns a naive datetime object containing the last modified time.""" + # If USE_TZ=False then get_modified_time will return a naive datetime + # so we just return that, else we have to localize and strip the tz + mtime = self.get_modified_time(name) + return mtime if is_naive(mtime) else make_naive(mtime) + + def _strip_signing_parameters(self, url): + # Boto3 does not currently support generating URLs that are unsigned. Instead we + # take the signed URLs and strip any querystring params related to signing and expiration. + # Note that this may end up with URLs that are still invalid, especially if params are + # passed in that only work with signed URLs, e.g. response header params. + # The code attempts to strip all query parameters that match names of known parameters + # from v2 and v4 signatures, regardless of the actual signature version used. 
+ split_url = urlparse.urlsplit(url) + qs = urlparse.parse_qsl(split_url.query, keep_blank_values=True) + blacklist = { + 'x-amz-algorithm', 'x-amz-credential', 'x-amz-date', + 'x-amz-expires', 'x-amz-signedheaders', 'x-amz-signature', + 'x-amz-security-token', 'awsaccesskeyid', 'expires', 'signature', + } + filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist) + # Note: Parameters that did not have a value in the original query string will have + # an '=' sign appended to it, e.g ?foo&bar becomes ?foo=&bar= + joined_qs = ('='.join(keyval) for keyval in filtered_qs) + split_url = split_url._replace(query="&".join(joined_qs)) + return split_url.geturl() + + def url(self, name, parameters=None, expire=None): + # Preserve the trailing slash after normalizing the path. + name = self._normalize_name(self._clean_name(name)) + if self.custom_domain: + return "{}//{}/{}".format(self.url_protocol, + self.custom_domain, filepath_to_uri(name)) + if expire is None: + expire = self.querystring_expire + + params = parameters.copy() if parameters else {} + params['Bucket'] = self.bucket.name + params['Key'] = self._encode_name(name) + url = self.bucket.meta.client.generate_presigned_url('get_object', Params=params, + ExpiresIn=expire) + if self.querystring_auth: + return url + return self._strip_signing_parameters(url) + + def get_available_name(self, name, max_length=None): + """Overwrite existing file with the same name.""" + name = self._clean_name(name) + if self.file_overwrite: + return get_available_overwrite_name(name, max_length) + return super(S3Boto3Storage, self).get_available_name(name, max_length) From 75321b7116a036e84bc156a480acce47088f03b3 Mon Sep 17 00:00:00 2001 From: trhr Date: Sun, 23 Feb 2020 19:27:41 -0600 Subject: [PATCH 2/7] Updated documentation; cleaned up --- evennia/contrib/aws-s3-cdn.py | 345 ++++++++++++++++++++++++---------- 1 file changed, 249 insertions(+), 96 deletions(-) diff --git a/evennia/contrib/aws-s3-cdn.py 
b/evennia/contrib/aws-s3-cdn.py index dbbdf59063..e79ccab2e3 100644 --- a/evennia/contrib/aws-s3-cdn.py +++ b/evennia/contrib/aws-s3-cdn.py @@ -1,16 +1,67 @@ """ -This plugin migrates the Web-based portion of Evennia, -namely images, javascript, and other items located -inside staticfiles into Amazon AWS (S3) for hosting. +ABOUT THIS PLUGIN: + +This plugin migrates the Web-based portion of Evennia, namely images, +javascript, and other items located inside staticfiles into Amazon AWS (S3) for hosting. + +Files hosted on S3 are "in the cloud," and while your personal +server may be sufficient for serving multimedia to a minimal number of users, +the perfect use case for this plugin would be: + +1) Servers supporting heavy web-based traffic (webclient, etc) +2) With a sizeable number of users +3) Where the users are globally distributed +4) Where multimedia files are served to users as a part of gameplay + +Bottom line - if you're sending an image to a player every time they traverse a +map, the bandwidth reduction will be substantial. If not, probably skip +this one. + +Note that storing and serving files via S3 is not technically free outside of +Amazon's "free tier" offering, which you may or may not be eligible for; +evennia's base install currently requires 1.5MB of storage space on S3, +making the current total cost to install this plugin ~$0.0005 per year. If +you have substantial media assets and intend to serve them to many users, +caveat emptor on a total cost of ownership - check AWS's pricing structure. + + +TECHNICAL DETAILS: + +This is a drop-in replacement that operates deeper than all of Evennia's code, +so your existing code does not need to change at all to support it. + +For example, when Evennia (or Django), tries to save a file permanently +(say, an image uploaded by a user), the save (or load) communication follows the path: + +Evennia -> Django +Django -> Storage backend +Storage backend -> file storage location (e.g. 
hard drive) + +https://docs.djangoproject.com/en/3.0/ref/settings/#std:setting-STATICFILES_STORAGE + +This plugin, when enabled, overrides the default storage backend, +which defaults to saving files at mygame/website/, instead, +sending the files to S3 via the storage backend defined herein. + +There is no way (or need) to directly access or use the functions here with +other contributions or custom code. Simply work how you would normally, Django +will handle the rest. INSTALLATION: -1) If you don't have an AWS S3 account, you should create one now. +1) If you don't have an AWS S3 account, you should create one at +https://aws.amazon.com/ - documentation for AWS S3 is available at: +https://docs.aws.amazon.com/AmazonS3/latest/gsg/GetStartedWithS3.html -Credentials required are an AWS IAM Access Key and Secret Keys, +Credentials required within the app are AWS IAM Access Key and Secret Keys, which can be generated/found in the AWS Console. -Example IAM Control Policy Permissions, if desired: +The following example IAM Control Policy Permissions can be added to +the IAM service inside AWS. Documentation for this can be found here: +https://docs.aws.amazon.com/IAM/latest/UserGuide/introduction.html + +Note that this is only required if you want to tightly secure the roles +that this plugin has access to. { "Version": "2012-10-17", @@ -50,24 +101,39 @@ Advanced Users: The second IAM statement, CreateBucket, is only needed for initial installation. You can remove it later, or you can create the bucket and set the ACL yourself before you continue. -2) This package requires the dependency "boto3," the official +2) This package requires the dependency "boto3 >= 1.4.4" the official AWS python package. You can install it with 'pip install boto3' while inside your evennia virtual environment (or, simply in your shell if you don't use a virtual environment). 
3) Customize the variables defined below in secret_settings.py, then run 'evennia stop', 'evennia start', 'evennia collectstatic' +No further configuration is needed. -AWS_ACCESS_KEY_ID = 'EUHUB20BU08AEU7' # CHANGE ME! -AWS_SECRET_ACCESS_KEY = 'a/uoexauodabuq4j;kmw;kvka0d2' # CHANGE ME! +4) Confirm that web assets are being served from S3 by visiting your +website, then checking the source of any image (for instance, the logo). +It should read https://your-bucket-name.s3.amazonaws.com/path/to/file + +START OF SECRET_SETTINGS.PY COPY/PASTE >>> + +AWS_ACCESS_KEY_ID = 'THIS_IS_PROVIDED_BY_AMAZON' +AWS_SECRET_ACCESS_KEY = 'THIS_IS_PROVIDED_BY_AMAZON' AWS_STORAGE_BUCKET_NAME = 'mygame-evennia' # CHANGE ME! + +The settings below need to go in secret_settings,py as well, but will +not need customization unless you want to do something particularly fancy. + AWS_S3_REGION_NAME = 'us-east-1' # N. Virginia AWS_S3_OBJECT_PARAMETERS = { 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', 'CacheControl': 'max-age=94608000', } AWS_DEFAULT_ACL = 'public-read' AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % settings.AWS_BUCKET_NAME AWS_AUTO_CREATE_BUCKET = True STATICFILES_STORAGE = 'evennia.contrib.aws-s3-cdn.S3Boto3Storage' -You may also store these as environment variables of the same name. + +<<< END OF SECRET_SETTINGS.PY COPY/PASTE + +You may also store these keys as environment variables of the same name. +For advanced configuration, refer to the docs for django-storages. UNINSTALLATION: @@ -79,40 +145,59 @@ directory. LICENSE: -aws-s3-cdn contrib is (c) 2020, trhr and released under BSD 3-Clause -License except where this license conflicts with the Evennia license. -Thank you to github.com/jschneier for contributions on django/boto3 classes. 
+Draws heavily from code provided by django-storages, for which these contributors +are authors: -BSD 3-Clause License +Marty Alchin (S3) +David Larlet (S3) +Arne Brodowski (S3) +Sebastian Serrano (S3) +Andrew McClain (MogileFS) +Rafal Jonca (FTP) +Chris McCormick (S3 with Boto) +Ivanov E. (Database) +Ariel Núñez (packaging) +Wim Leers (SymlinkOrCopy + patches) +Michael Elsdörfer (Overwrite + PEP8 compatibility) +Christian Klein (CouchDB) +Rich Leland (Mosso Cloud Files) +Jason Christa (patches) +Adam Nelson (patches) +Erik CW (S3 encryption) +Axel Gembe (Hash path) +Waldemar Kornewald (MongoDB) +Russell Keith-Magee (Apache LibCloud patches) +Jannis Leidel (S3 and GS with Boto) +Andrei Coman (Azure) +Chris Streeter (S3 with Boto) +Josh Schneier (Fork maintainer, Bugfixes, Py3K) +Anthony Monthe (Dropbox) +EunPyo (Andrew) Hong (Azure) +Michael Barrientos (S3 with Boto3) +piglei (patches) +Matt Braymer-Hayes (S3 with Boto3) +Eirik Martiniussen Sylliaas (Google Cloud Storage native support) +Jody McIntyre (Google Cloud Storage native support) +Stanislav Kaledin (Bug fixes in SFTPStorage) +Filip Vavera (Google Cloud MIME types support) +Max Malysh (Dropbox large file support) +Scott White (Google Cloud updates) +Alex Watt (Google Cloud Storage patch) +Jumpei Yoshimura (S3 docs) +Jon Dufresne +Rodrigo Gadea (Dropbox fixes) +Martey Dodoo +Chris Rink +Shaung Cheng (S3 docs) +Andrew Perry (Bug fixes in SFTPStorage) -Copyright (c) 2008 - 2020, See AUTHORS file. -All rights reserved. +The repurposed code from django-storages is released under BSD 3-Clause, +same as Evennia, so for detailed licensing, refer to the Evennia license. -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +VERSIONING: +This is confirmed to work for Django 2 and Django 3. +' """ from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, SuspiciousFileOperation @@ -133,7 +218,6 @@ import mimetypes import os import posixpath import threading -import warnings from gzip import GzipFile from tempfile import SpooledTemporaryFile from django.core.files.base import File @@ -160,45 +244,35 @@ boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) def setting(name, default=None): """ - Helper function to get a Django setting by name. If setting doesn't exists + Helper function to get a Django setting by name. If setting doesn't exist it will return a default. 
- :param name: Name of setting - :type name: str - :param default: Value if setting is unfound - :returns: Setting's value + + Args: + name (str): A Django setting name + + Returns: + The value of the setting variable by that name + """ return getattr(ev_settings, name, default) -def clean_name(name): - """ - Cleans the name so that Windows style paths work - """ - # Normalize Windows style paths - clean_name = posixpath.normpath(name).replace('\\', '/') - - # os.path.normpath() can strip trailing slashes so we implement - # a workaround here. - if name.endswith('/') and not clean_name.endswith('/'): - # Add a trailing slash as it was stripped. - clean_name = clean_name + '/' - - # Given an empty string, os.path.normpath() will return ., which we don't want - if clean_name == '.': - clean_name = '' - - return clean_name - - def safe_join(base, *paths): """ - A version of django.utils._os.safe_join for S3 paths. + Helper function, a version of django.utils._os.safe_join for S3 paths. Joins one or more path components to the base path component intelligently. Returns a normalized version of the final path. The final path must be located inside of the base path component - (otherwise a ValueError is raised). - Paths outside the base path indicate a possible security - sensitive operation. + (otherwise a ValueError is raised). Paths outside the base path + indicate a possible security sensitive operation. + + Args: + base (str): A path string to the base of the staticfiles + *paths (list): A list of paths as referenced from the base path + + Returns: + final_path (str): A joined path, base + filepath + """ base_path = force_text(base) base_path = base_path.rstrip('/') @@ -225,6 +299,17 @@ def safe_join(base, *paths): def check_location(storage): + """ + Helper function to make sure that the storage location is configured correctly. 
+ + Args: + storage (Storage): A Storage object (Django) + + Raises: + ImproperlyConfigured: If the storage location is not configured correctly, + this is raised. + + """ if storage.location.startswith('/'): correct = storage.location.lstrip('/') raise ImproperlyConfigured( @@ -238,8 +323,14 @@ def check_location(storage): def lookup_env(names): """ - Look up for names in environment. Returns the first element - found. + Helper function for looking up names in env vars. Returns the first element found. + + Args: + names (str): A list of environment variables + + Returns: + value (str): The value of the found environment variable. + """ for name in names: value = os.environ.get(name) @@ -248,6 +339,16 @@ def lookup_env(names): def get_available_overwrite_name(name, max_length): + """ + Helper function indicating files that will be overwritten during trunc. + + Args: + name (str): The name of the file + max_length (int): The maximum length of a filename + + Returns: + joined (path): A joined path including directory, file, and extension + """ if max_length is None or len(name) <= max_length: return name @@ -285,6 +386,15 @@ class S3Boto3StorageFile(File): buffer_size = setting('AWS_S3_FILE_BUFFER_SIZE', 5242880) def __init__(self, name, mode, storage, buffer_size=None): + """ + Initializes the File object. 
+ + Args: + name (str): The name of the file + mode (str): The access mode ('r' or 'w') + storage (Storage): The Django Storage object + buffer_size (int): The buffer size, for multipart uploads + """ if 'r' in mode and 'w' in mode: raise ValueError("Can't combine 'r' and 'w' in mode.") self._storage = storage @@ -309,9 +419,15 @@ class S3Boto3StorageFile(File): @property def size(self): + """ + Helper property to return filesize + """ return self.obj.content_length def _get_file(self): + """ + Helper function to manage zipping and temporary files + """ if self._file is None: self._file = SpooledTemporaryFile( max_size=self._storage.max_memory_size, @@ -332,16 +448,26 @@ class S3Boto3StorageFile(File): file = property(_get_file, _set_file) def read(self, *args, **kwargs): + """ + Checks if file is in read mode; then continues to boto3 operation + """ if 'r' not in self._mode: raise AttributeError("File was not opened in read mode.") - return self._force_mode(super(S3Boto3StorageFile, self).read(*args, **kwargs)) + return self._force_mode(super().read(*args, **kwargs)) def readline(self, *args, **kwargs): + """ + Checks if file is in read mode; then continues to boto3 operation + """ if 'r' not in self._mode: raise AttributeError("File was not opened in read mode.") - return self._force_mode(super(S3Boto3StorageFile, self).readline(*args, **kwargs)) + return self._force_mode(super().readline(*args, **kwargs)) def write(self, content): + """ + Checks if file is in write mode or needs multipart handling, + then continues to boto3 operation. 
+ """ if 'w' not in self._mode: raise AttributeError("File was not opened in write mode.") self._is_dirty = True @@ -353,7 +479,7 @@ class S3Boto3StorageFile(File): self._flush_write_buffer() bstr = force_bytes(content) self._raw_bytes_written += len(bstr) - return super(S3Boto3StorageFile, self).write(bstr) + return super().write(bstr) @property def _buffer_file_size(self): @@ -383,6 +509,9 @@ class S3Boto3StorageFile(File): where files are always created after they are opened in write mode: f = storage.open("file.txt", mode="w") f.close() + + Raises: + Exception: Raised if a 404 error occurs """ assert "w" in self._mode assert self._raw_bytes_written == 0 @@ -399,11 +528,11 @@ class S3Boto3StorageFile(File): raise def close(self): + """ + Manages file closing after multipart uploads + """ if self._is_dirty: self._flush_write_buffer() - # TODO: Possibly cache the part ids as they're being uploaded - # instead of requesting parts from server. For now, emulating - # s3boto's behavior. parts = [{'ETag': part.e_tag, 'PartNumber': part.part_number} for part in self._multipart.parts.all()] self._multipart.complete( @@ -472,8 +601,10 @@ class S3Boto3Storage(Storage): max_memory_size = setting('AWS_S3_MAX_MEMORY_SIZE', 0) def __init__(self, acl=None, bucket=None, **settings): - # check if some of the settings we've provided as class attributes - # need to be overwritten with values passed in here + """ + Check if some of the settings we've provided as class attributes + need to be overwritten with values passed in here. + """ for name, value in settings.items(): if hasattr(self, name): setattr(self, name, value) @@ -501,14 +632,8 @@ class S3Boto3Storage(Storage): if boto3_version_info >= (1, 4, 4): kwargs['proxies'] = self.proxies - else: - warnings.warn( - "In version 1.10 of django-storages the minimum required version of " - "boto3 will be 1.4.4. 
You have %s " % boto3_version_info - ) self.config = Config(**kwargs) - def __getstate__(self): state = self.__dict__.copy() state.pop('_connections', None) @@ -522,6 +647,9 @@ class S3Boto3Storage(Storage): @property def connection(self): + """ + Creates the actual connection to S3 + """ connection = getattr(self._connections, 'connection', None) if connection is None: session = boto3.session.Session() @@ -673,6 +801,9 @@ class S3Boto3Storage(Storage): return zbuf def _open(self, name, mode='rb'): + """ + Opens the file, if it exists. + """ name = self._normalize_name(self._clean_name(name)) try: f = S3Boto3StorageFile(name, mode, self) @@ -683,6 +814,9 @@ class S3Boto3Storage(Storage): return f def _save(self, name, content): + """ + Stitches and cleans multipart uploads; normalizes file paths. + """ cleaned_name = self._clean_name(name) name = self._normalize_name(cleaned_name) params = self._get_write_parameters(name, content) @@ -703,6 +837,9 @@ class S3Boto3Storage(Storage): return cleaned_name def delete(self, name): + """ + Deletes a file from S3. + """ name = self._normalize_name(self._clean_name(name)) self.bucket.Object(self._encode_name(name)).delete() @@ -710,6 +847,9 @@ class S3Boto3Storage(Storage): del self._entries[name] def exists(self, name): + """ + Checks if file exists. + """ name = self._normalize_name(self._clean_name(name)) if self.entries: return name in self.entries @@ -720,6 +860,10 @@ class S3Boto3Storage(Storage): return False def listdir(self, name): + """ + Translational function to go from S3 file paths to the format + Django's listdir expects. + """ path = self._normalize_name(self._clean_name(name)) # The path needs to end with a slash, but if the root is empty, leave # it. @@ -738,6 +882,9 @@ class S3Boto3Storage(Storage): return directories, files def size(self, name): + """ + Gets the filesize of a remote file. 
+ """ name = self._normalize_name(self._clean_name(name)) if self.entries: entry = self.entries.get(name) @@ -794,19 +941,22 @@ class S3Boto3Storage(Storage): return make_naive(entry.last_modified) def modified_time(self, name): - """Returns a naive datetime object containing the last modified time.""" - # If USE_TZ=False then get_modified_time will return a naive datetime - # so we just return that, else we have to localize and strip the tz + """Returns a naive datetime object containing the last modified time. + If USE_TZ=False then get_modified_time will return a naive datetime + so we just return that, else we have to localize and strip the tz + """ mtime = self.get_modified_time(name) return mtime if is_naive(mtime) else make_naive(mtime) def _strip_signing_parameters(self, url): - # Boto3 does not currently support generating URLs that are unsigned. Instead we - # take the signed URLs and strip any querystring params related to signing and expiration. - # Note that this may end up with URLs that are still invalid, especially if params are - # passed in that only work with signed URLs, e.g. response header params. - # The code attempts to strip all query parameters that match names of known parameters - # from v2 and v4 signatures, regardless of the actual signature version used. + """ + Boto3 does not currently support generating URLs that are unsigned. Instead we + take the signed URLs and strip any querystring params related to signing and expiration. + Note that this may end up with URLs that are still invalid, especially if params are + passed in that only work with signed URLs, e.g. response header params. + The code attempts to strip all query parameters that match names of known parameters + from v2 and v4 signatures, regardless of the actual signature version used. 
+ """ split_url = urlparse.urlsplit(url) qs = urlparse.parse_qsl(split_url.query, keep_blank_values=True) blacklist = { @@ -822,6 +972,9 @@ class S3Boto3Storage(Storage): return split_url.geturl() def url(self, name, parameters=None, expire=None): + """ + Returns the URL of a remotely-hosted file + """ # Preserve the trailing slash after normalizing the path. name = self._normalize_name(self._clean_name(name)) if self.custom_domain: @@ -844,4 +997,4 @@ class S3Boto3Storage(Storage): name = self._clean_name(name) if self.file_overwrite: return get_available_overwrite_name(name, max_length) - return super(S3Boto3Storage, self).get_available_name(name, max_length) + return super().get_available_name(name, max_length) From f7ee46e8d836c615fa91f148543c00f4fb5909b8 Mon Sep 17 00:00:00 2001 From: trhr Date: Sun, 23 Feb 2020 19:52:55 -0600 Subject: [PATCH 3/7] Blacked source --- evennia/contrib/aws-s3-cdn.py | 319 +++++++++++++++++++--------------- 1 file changed, 176 insertions(+), 143 deletions(-) diff --git a/evennia/contrib/aws-s3-cdn.py b/evennia/contrib/aws-s3-cdn.py index e79ccab2e3..41a6e54113 100644 --- a/evennia/contrib/aws-s3-cdn.py +++ b/evennia/contrib/aws-s3-cdn.py @@ -107,8 +107,7 @@ while inside your evennia virtual environment (or, simply in your shell if you don't use a virtual environment). 3) Customize the variables defined below in secret_settings.py, -then run 'evennia stop', 'evennia start', 'evennia collectstatic' -No further configuration is needed. +then run 'evennia reboot.' No further configuration is needed. 4) Confirm that web assets are being served from S3 by visiting your website, then checking the source of any image (for instance, the logo). @@ -118,13 +117,14 @@ START OF SECRET_SETTINGS.PY COPY/PASTE >>> AWS_ACCESS_KEY_ID = 'THIS_IS_PROVIDED_BY_AMAZON' AWS_SECRET_ACCESS_KEY = 'THIS_IS_PROVIDED_BY_AMAZON' -AWS_STORAGE_BUCKET_NAME = 'mygame-evennia' # CHANGE ME! +AWS_STORAGE_BUCKET_NAME = 'mygame-evennia' # CHANGE ME! 
I suggest yourgamename-evennia The settings below need to go in secret_settings,py as well, but will not need customization unless you want to do something particularly fancy. AWS_S3_REGION_NAME = 'us-east-1' # N. Virginia -AWS_S3_OBJECT_PARAMETERS = { 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', 'CacheControl': 'max-age=94608000', } +AWS_S3_OBJECT_PARAMETERS = { 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', + 'CacheControl': 'max-age=94608000', } AWS_DEFAULT_ACL = 'public-read' AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % settings.AWS_BUCKET_NAME AWS_AUTO_CREATE_BUCKET = True @@ -200,15 +200,36 @@ This is confirmed to work for Django 2 and Django 3. ' """ -from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, SuspiciousFileOperation +from django.core.exceptions import ( + ImproperlyConfigured, + SuspiciousOperation, + SuspiciousFileOperation, +) try: from django.conf import settings as ev_settings - if not ev_settings.AWS_ACCESS_KEY_ID or not ev_settings.AWS_SECRET_ACCESS_KEY or not ev_settings.AWS_STORAGE_BUCKET_NAME or not ev_settings.AWS_S3_REGION_NAME: - raise ImproperlyConfigured("You must add AWS-specific settings to mygame/server/conf/secret_settings.py to use this plugin.") - if 'mygame-evennia' == ev_settings.AWS_STORAGE_BUCKET_NAME: - raise ImproperlyConfigured("You must customize your AWS_STORAGE_BUCKET_NAME in mygame/server/conf/secret_settings.py; it must be unique among ALL other S3 users") + if ( + not ev_settings.AWS_ACCESS_KEY_ID + or not ev_settings.AWS_SECRET_ACCESS_KEY + or not ev_settings.AWS_STORAGE_BUCKET_NAME + or not ev_settings.AWS_S3_REGION_NAME + ): + raise ImproperlyConfigured( + ( + "You must add AWS-specific settings" + "to mygame/server/conf/secret_settings.py to use this plugin." 
+ ) + ) + + if "mygame-evennia" == ev_settings.AWS_STORAGE_BUCKET_NAME: + raise ImproperlyConfigured( + ( + "You must customize your AWS_STORAGE_BUCKET_NAME" + "in mygame/server/conf/secret_settings.py;" + "it must be unique among ALL other S3 users" + ) + ) except Exception as e: print(e) @@ -223,7 +244,7 @@ from tempfile import SpooledTemporaryFile from django.core.files.base import File from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible -from django.utils.encoding import (filepath_to_uri, force_bytes, force_text, smart_text) +from django.utils.encoding import filepath_to_uri, force_bytes, force_text, smart_text from django.utils.timezone import is_naive, make_naive try: @@ -237,9 +258,9 @@ try: from botocore.client import Config from botocore.exceptions import ClientError except ImportError as e: - raise ImproperlyConfigured("Could not load Boto3's S3 bindings. %s Did you run 'pip install boto3?'" % e) + raise ImproperlyConfigured("Couldn't load S3 bindings. %s Did you run 'pip install boto3?'" % e) -boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) +boto3_version_info = tuple([int(i) for i in boto3_version.split(".")]) def setting(name, default=None): @@ -275,27 +296,26 @@ def safe_join(base, *paths): """ base_path = force_text(base) - base_path = base_path.rstrip('/') + base_path = base_path.rstrip("/") paths = [force_text(p) for p in paths] - final_path = base_path + '/' + final_path = base_path + "/" for path in paths: _final_path = posixpath.normpath(posixpath.join(final_path, path)) # posixpath.normpath() strips the trailing /. Add it back. - if path.endswith('/') or _final_path + '/' == final_path: - _final_path += '/' + if path.endswith("/") or _final_path + "/" == final_path: + _final_path += "/" final_path = _final_path if final_path == base_path: - final_path += '/' + final_path += "/" # Ensure final_path starts with base_path and that the next character after # the base path is /. 
base_path_len = len(base_path) - if (not final_path.startswith(base_path) or final_path[base_path_len] != '/'): - raise ValueError('the joined path is located outside of the base path' - ' component') + if not final_path.startswith(base_path) or final_path[base_path_len] != "/": + raise ValueError("the joined path is located outside of the base path" " component") - return final_path.lstrip('/') + return final_path.lstrip("/") def check_location(storage): @@ -310,13 +330,11 @@ def check_location(storage): this is raised. """ - if storage.location.startswith('/'): - correct = storage.location.lstrip('/') + if storage.location.startswith("/"): + correct = storage.location.lstrip("/") raise ImproperlyConfigured( "{}.location cannot begin with a leading slash. Found '{}'. Use '{}' instead.".format( - storage.__class__.__name__, - storage.location, - correct, + storage.__class__.__name__, storage.location, correct, ) ) @@ -361,7 +379,7 @@ def get_available_overwrite_name(name, max_length): if not file_root: raise SuspiciousFileOperation( 'aws-s3-cdn tried to truncate away entire filename "%s". ' - 'Please make sure that the corresponding file field ' + "Please make sure that the corresponding file field " 'allows sufficient "max_length".' % name ) return os.path.join(dir_name, "{}{}".format(file_root, file_ext)) @@ -383,7 +401,8 @@ class S3Boto3StorageFile(File): order to properly write the file to S3. Be sure to close the file in your application. 
""" - buffer_size = setting('AWS_S3_FILE_BUFFER_SIZE', 5242880) + + buffer_size = setting("AWS_S3_FILE_BUFFER_SIZE", 5242880) def __init__(self, name, mode, storage, buffer_size=None): """ @@ -395,14 +414,14 @@ class S3Boto3StorageFile(File): storage (Storage): The Django Storage object buffer_size (int): The buffer size, for multipart uploads """ - if 'r' in mode and 'w' in mode: + if "r" in mode and "w" in mode: raise ValueError("Can't combine 'r' and 'w' in mode.") self._storage = storage - self.name = name[len(self._storage.location):].lstrip('/') + self.name = name[len(self._storage.location):].lstrip("/") self._mode = mode - self._force_mode = (lambda b: b) if 'b' in mode else force_text + self._force_mode = (lambda b: b) if "b" in mode else force_text self.obj = storage.bucket.Object(storage._encode_name(name)) - if 'w' not in mode: + if "w" not in mode: # Force early RAII-style exception if object does not exist self.obj.load() self._is_dirty = False @@ -432,13 +451,13 @@ class S3Boto3StorageFile(File): self._file = SpooledTemporaryFile( max_size=self._storage.max_memory_size, suffix=".S3Boto3StorageFile", - dir=setting("FILE_UPLOAD_TEMP_DIR") + dir=setting("FILE_UPLOAD_TEMP_DIR"), ) - if 'r' in self._mode: + if "r" in self._mode: self._is_dirty = False self.obj.download_fileobj(self._file) self._file.seek(0) - if self._storage.gzip and self.obj.content_encoding == 'gzip': + if self._storage.gzip and self.obj.content_encoding == "gzip": self._file = GzipFile(mode=self._mode, fileobj=self._file, mtime=0.0) return self._file @@ -451,7 +470,7 @@ class S3Boto3StorageFile(File): """ Checks if file is in read mode; then continues to boto3 operation """ - if 'r' not in self._mode: + if "r" not in self._mode: raise AttributeError("File was not opened in read mode.") return self._force_mode(super().read(*args, **kwargs)) @@ -459,7 +478,7 @@ class S3Boto3StorageFile(File): """ Checks if file is in read mode; then continues to boto3 operation """ - if 'r' not in 
self._mode: + if "r" not in self._mode: raise AttributeError("File was not opened in read mode.") return self._force_mode(super().readline(*args, **kwargs)) @@ -468,7 +487,7 @@ class S3Boto3StorageFile(File): Checks if file is in write mode or needs multipart handling, then continues to boto3 operation. """ - if 'w' not in self._mode: + if "w" not in self._mode: raise AttributeError("File was not opened in write mode.") self._is_dirty = True if self._multipart is None: @@ -521,9 +540,7 @@ class S3Boto3StorageFile(File): self.obj.load() except ClientError as err: if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: - self.obj.put( - Body=b"", **self._storage._get_write_parameters(self.obj.key) - ) + self.obj.put(Body=b"", **self._storage._get_write_parameters(self.obj.key)) else: raise @@ -533,14 +550,15 @@ class S3Boto3StorageFile(File): """ if self._is_dirty: self._flush_write_buffer() - parts = [{'ETag': part.e_tag, 'PartNumber': part.part_number} - for part in self._multipart.parts.all()] - self._multipart.complete( - MultipartUpload={'Parts': parts}) + parts = [ + {"ETag": part.e_tag, "PartNumber": part.part_number} + for part in self._multipart.parts.all() + ] + self._multipart.complete(MultipartUpload={"Parts": parts}) else: if self._multipart is not None: self._multipart.abort() - if 'w' in self._mode and self._raw_bytes_written == 0: + if "w" in self._mode and self._raw_bytes_written == 0: self._create_empty_on_close() if self._file is not None: self._file.close() @@ -555,50 +573,54 @@ class S3Boto3Storage(Storage): mode and supports streaming(buffering) data in chunks to S3 when writing. """ - default_content_type = 'application/octet-stream' + + default_content_type = "application/octet-stream" # If config provided in init, signature_version and addressing_style settings/args are ignored. 
config = None # used for looking up the access and secret key from env vars - access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'] - secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY'] - security_token_names = ['AWS_SESSION_TOKEN', 'AWS_SECURITY_TOKEN'] + access_key_names = ["AWS_S3_ACCESS_KEY_ID", "AWS_ACCESS_KEY_ID"] + secret_key_names = ["AWS_S3_SECRET_ACCESS_KEY", "AWS_SECRET_ACCESS_KEY"] + security_token_names = ["AWS_SESSION_TOKEN", "AWS_SECURITY_TOKEN"] security_token = None - access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID')) - secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY')) - file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True) - object_parameters = setting('AWS_S3_OBJECT_PARAMETERS', {}) - bucket_name = setting('AWS_STORAGE_BUCKET_NAME') - auto_create_bucket = setting('AWS_AUTO_CREATE_BUCKET', False) - default_acl = setting('AWS_DEFAULT_ACL', 'public-read') - bucket_acl = setting('AWS_BUCKET_ACL', default_acl) - querystring_auth = setting('AWS_QUERYSTRING_AUTH', True) - querystring_expire = setting('AWS_QUERYSTRING_EXPIRE', 3600) - signature_version = setting('AWS_S3_SIGNATURE_VERSION') - reduced_redundancy = setting('AWS_REDUCED_REDUNDANCY', False) - location = setting('AWS_LOCATION', '') - encryption = setting('AWS_S3_ENCRYPTION', False) - custom_domain = setting('AWS_S3_CUSTOM_DOMAIN') - addressing_style = setting('AWS_S3_ADDRESSING_STYLE') - secure_urls = setting('AWS_S3_SECURE_URLS', True) - file_name_charset = setting('AWS_S3_FILE_NAME_CHARSET', 'utf-8') - gzip = setting('AWS_IS_GZIPPED', False) - preload_metadata = setting('AWS_PRELOAD_METADATA', False) - gzip_content_types = setting('GZIP_CONTENT_TYPES', ( - 'text/css', - 'text/javascript', - 'application/javascript', - 'application/x-javascript', - 'image/svg+xml', - )) - url_protocol = setting('AWS_S3_URL_PROTOCOL', 'http:') - endpoint_url = setting('AWS_S3_ENDPOINT_URL') - proxies = 
setting('AWS_S3_PROXIES') - region_name = setting('AWS_S3_REGION_NAME') - use_ssl = setting('AWS_S3_USE_SSL', True) - verify = setting('AWS_S3_VERIFY', None) - max_memory_size = setting('AWS_S3_MAX_MEMORY_SIZE', 0) + access_key = setting("AWS_S3_ACCESS_KEY_ID", setting("AWS_ACCESS_KEY_ID")) + secret_key = setting("AWS_S3_SECRET_ACCESS_KEY", setting("AWS_SECRET_ACCESS_KEY")) + file_overwrite = setting("AWS_S3_FILE_OVERWRITE", True) + object_parameters = setting("AWS_S3_OBJECT_PARAMETERS", {}) + bucket_name = setting("AWS_STORAGE_BUCKET_NAME") + auto_create_bucket = setting("AWS_AUTO_CREATE_BUCKET", False) + default_acl = setting("AWS_DEFAULT_ACL", "public-read") + bucket_acl = setting("AWS_BUCKET_ACL", default_acl) + querystring_auth = setting("AWS_QUERYSTRING_AUTH", True) + querystring_expire = setting("AWS_QUERYSTRING_EXPIRE", 3600) + signature_version = setting("AWS_S3_SIGNATURE_VERSION") + reduced_redundancy = setting("AWS_REDUCED_REDUNDANCY", False) + location = setting("AWS_LOCATION", "") + encryption = setting("AWS_S3_ENCRYPTION", False) + custom_domain = setting("AWS_S3_CUSTOM_DOMAIN") + addressing_style = setting("AWS_S3_ADDRESSING_STYLE") + secure_urls = setting("AWS_S3_SECURE_URLS", True) + file_name_charset = setting("AWS_S3_FILE_NAME_CHARSET", "utf-8") + gzip = setting("AWS_IS_GZIPPED", False) + preload_metadata = setting("AWS_PRELOAD_METADATA", False) + gzip_content_types = setting( + "GZIP_CONTENT_TYPES", + ( + "text/css", + "text/javascript", + "application/javascript", + "application/x-javascript", + "image/svg+xml", + ), + ) + url_protocol = setting("AWS_S3_URL_PROTOCOL", "http:") + endpoint_url = setting("AWS_S3_ENDPOINT_URL") + proxies = setting("AWS_S3_PROXIES") + region_name = setting("AWS_S3_REGION_NAME") + use_ssl = setting("AWS_S3_USE_SSL", True) + verify = setting("AWS_S3_VERIFY", None) + max_memory_size = setting("AWS_S3_MAX_MEMORY_SIZE", 0) def __init__(self, acl=None, bucket=None, **settings): """ @@ -615,7 +637,7 @@ class 
S3Boto3Storage(Storage): # we fall back to https if specified in order to avoid the construction # of unsecure urls. if self.secure_urls: - self.url_protocol = 'https:' + self.url_protocol = "https:" self._entries = {} self._bucket = None @@ -626,23 +648,23 @@ class S3Boto3Storage(Storage): if not self.config: kwargs = dict( - s3={'addressing_style': self.addressing_style}, + s3={"addressing_style": self.addressing_style}, signature_version=self.signature_version, ) if boto3_version_info >= (1, 4, 4): - kwargs['proxies'] = self.proxies + kwargs["proxies"] = self.proxies self.config = Config(**kwargs) def __getstate__(self): state = self.__dict__.copy() - state.pop('_connections', None) - state.pop('_bucket', None) + state.pop("_connections", None) + state.pop("_bucket", None) return state def __setstate__(self, state): - state['_connections'] = threading.local() - state['_bucket'] = None + state["_connections"] = threading.local() + state["_bucket"] = None self.__dict__ = state @property @@ -650,11 +672,11 @@ class S3Boto3Storage(Storage): """ Creates the actual connection to S3 """ - connection = getattr(self._connections, 'connection', None) + connection = getattr(self._connections, "connection", None) if connection is None: session = boto3.session.Session() self._connections.connection = session.resource( - 's3', + "s3", aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key, aws_session_token=self.security_token, @@ -717,13 +739,15 @@ class S3Boto3Storage(Storage): # fails on wrong region, while bucket.load() does not. bucket.meta.client.head_bucket(Bucket=name) except ClientError as err: - if err.response['ResponseMetadata']['HTTPStatusCode'] == 301: - raise ImproperlyConfigured("Bucket %s exists, but in a different " - "region than we are connecting to. Set " - "the region to connect to by setting " - "AWS_S3_REGION_NAME to the correct region." 
% name) + if err.response["ResponseMetadata"]["HTTPStatusCode"] == 301: + raise ImproperlyConfigured( + "Bucket %s exists, but in a different " + "region than we are connecting to. Set " + "the region to connect to by setting " + "AWS_S3_REGION_NAME to the correct region." % name + ) - elif err.response['ResponseMetadata']['HTTPStatusCode'] == 404: + elif err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: # Notes: When using the us-east-1 Standard endpoint, you can create # buckets in other regions. The same is not true when hitting region specific # endpoints. However, when you create the bucket not in the same region, the @@ -737,13 +761,14 @@ class S3Boto3Storage(Storage): # region names; LocationConstraint *must* be blank to create in US Standard. if self.bucket_acl: - bucket_params = {'ACL': self.bucket_acl} + bucket_params = {"ACL": self.bucket_acl} else: bucket_params = {} region_name = self.connection.meta.client.meta.region_name - if region_name != 'us-east-1': - bucket_params['CreateBucketConfiguration'] = { - 'LocationConstraint': region_name} + if region_name != "us-east-1": + bucket_params["CreateBucketConfiguration"] = { + "LocationConstraint": region_name + } bucket.create(**bucket_params) else: raise @@ -754,13 +779,13 @@ class S3Boto3Storage(Storage): Cleans the name so that Windows style paths work """ # Normalize Windows style paths - clean_name = posixpath.normpath(name).replace('\\', '/') + clean_name = posixpath.normpath(name).replace("\\", "/") # os.path.normpath() can strip trailing slashes so we implement # a workaround here. - if name.endswith('/') and not clean_name.endswith('/'): + if name.endswith("/") and not clean_name.endswith("/"): # Add a trailing slash as it was stripped. 
- clean_name += '/' + clean_name += "/" return clean_name def _normalize_name(self, name): @@ -772,8 +797,7 @@ class S3Boto3Storage(Storage): try: return safe_join(self.location, name) except ValueError: - raise SuspiciousOperation("Attempted access to '%s' denied." % - name) + raise SuspiciousOperation("Attempted access to '%s' denied." % name) def _encode_name(self, name): return smart_text(name, encoding=self.file_name_charset) @@ -786,10 +810,10 @@ class S3Boto3Storage(Storage): content.seek(0) zbuf = io.BytesIO() # The GZIP header has a modification time attribute (see http://www.zlib.org/rfc-gzip.html) - # This means each time a file is compressed it changes even if the other contents don't change + # Each time a file is compressed it changes even if the other contents don't change # For S3 this defeats detection of changes using MD5 sums on gzipped files # Fixing the mtime at 0.0 at compression time avoids this problem - zfile = GzipFile(mode='wb', fileobj=zbuf, mtime=0.0) + zfile = GzipFile(mode="wb", fileobj=zbuf, mtime=0.0) try: zfile.write(force_bytes(content.read())) finally: @@ -800,7 +824,7 @@ class S3Boto3Storage(Storage): # so just returning the BytesIO directly return zbuf - def _open(self, name, mode='rb'): + def _open(self, name, mode="rb"): """ Opens the file, if it exists. 
""" @@ -808,8 +832,8 @@ class S3Boto3Storage(Storage): try: f = S3Boto3StorageFile(name, mode, self) except ClientError as err: - if err.response['ResponseMetadata']['HTTPStatusCode'] == 404: - raise IOError('File does not exist: %s' % name) + if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: + raise IOError("File does not exist: %s" % name) raise # Let it bubble up if it was some other error return f @@ -821,11 +845,13 @@ class S3Boto3Storage(Storage): name = self._normalize_name(cleaned_name) params = self._get_write_parameters(name, content) - if (self.gzip and - params['ContentType'] in self.gzip_content_types and - 'ContentEncoding' not in params): + if ( + self.gzip + and params["ContentType"] in self.gzip_content_types + and "ContentEncoding" not in params + ): content = self._compress_content(content) - params['ContentEncoding'] = 'gzip' + params["ContentEncoding"] = "gzip" encoded_name = self._encode_name(name) obj = self.bucket.Object(encoded_name) @@ -867,18 +893,18 @@ class S3Boto3Storage(Storage): path = self._normalize_name(self._clean_name(name)) # The path needs to end with a slash, but if the root is empty, leave # it. 
- if path and not path.endswith('/'): - path += '/' + if path and not path.endswith("/"): + path += "/" directories = [] files = [] - paginator = self.connection.meta.client.get_paginator('list_objects') - pages = paginator.paginate(Bucket=self.bucket_name, Delimiter='/', Prefix=path) + paginator = self.connection.meta.client.get_paginator("list_objects") + pages = paginator.paginate(Bucket=self.bucket_name, Delimiter="/", Prefix=path) for page in pages: - for entry in page.get('CommonPrefixes', ()): - directories.append(posixpath.relpath(entry['Prefix'], path)) - for entry in page.get('Contents', ()): - files.append(posixpath.relpath(entry['Key'], path)) + for entry in page.get("CommonPrefixes", ()): + directories.append(posixpath.relpath(entry["Prefix"], path)) + for entry in page.get("Contents", ()): + files.append(posixpath.relpath(entry["Key"], path)) return directories, files def size(self, name): @@ -889,7 +915,7 @@ class S3Boto3Storage(Storage): if self.entries: entry = self.entries.get(name) if entry: - return entry.size if hasattr(entry, 'size') else entry.content_length + return entry.size if hasattr(entry, "size") else entry.content_length return 0 return self.bucket.Object(self._encode_name(name)).content_length @@ -897,19 +923,19 @@ class S3Boto3Storage(Storage): params = {} if self.encryption: - params['ServerSideEncryption'] = 'AES256' + params["ServerSideEncryption"] = "AES256" if self.reduced_redundancy: - params['StorageClass'] = 'REDUCED_REDUNDANCY' + params["StorageClass"] = "REDUCED_REDUNDANCY" if self.default_acl: - params['ACL'] = self.default_acl + params["ACL"] = self.default_acl _type, encoding = mimetypes.guess_type(name) - content_type = getattr(content, 'content_type', None) + content_type = getattr(content, "content_type", None) content_type = content_type or _type or self.default_content_type - params['ContentType'] = content_type + params["ContentType"] = content_type if encoding: - params['ContentEncoding'] = encoding + 
params["ContentEncoding"] = encoding params.update(self.get_object_parameters(name)) return params @@ -934,7 +960,7 @@ class S3Boto3Storage(Storage): # in the preloaded metadata. if entry is None: entry = self.bucket.Object(self._encode_name(name)) - if setting('USE_TZ'): + if setting("USE_TZ"): # boto3 returns TZ aware timestamps return entry.last_modified else: @@ -960,14 +986,21 @@ class S3Boto3Storage(Storage): split_url = urlparse.urlsplit(url) qs = urlparse.parse_qsl(split_url.query, keep_blank_values=True) blacklist = { - 'x-amz-algorithm', 'x-amz-credential', 'x-amz-date', - 'x-amz-expires', 'x-amz-signedheaders', 'x-amz-signature', - 'x-amz-security-token', 'awsaccesskeyid', 'expires', 'signature', + "x-amz-algorithm", + "x-amz-credential", + "x-amz-date", + "x-amz-expires", + "x-amz-signedheaders", + "x-amz-signature", + "x-amz-security-token", + "awsaccesskeyid", + "expires", + "signature", } filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist) # Note: Parameters that did not have a value in the original query string will have # an '=' sign appended to it, e.g ?foo&bar becomes ?foo=&bar= - joined_qs = ('='.join(keyval) for keyval in filtered_qs) + joined_qs = ("=".join(keyval) for keyval in filtered_qs) split_url = split_url._replace(query="&".join(joined_qs)) return split_url.geturl() @@ -978,16 +1011,16 @@ class S3Boto3Storage(Storage): # Preserve the trailing slash after normalizing the path. 
name = self._normalize_name(self._clean_name(name)) if self.custom_domain: - return "{}//{}/{}".format(self.url_protocol, - self.custom_domain, filepath_to_uri(name)) + return "{}//{}/{}".format(self.url_protocol, self.custom_domain, filepath_to_uri(name)) if expire is None: expire = self.querystring_expire params = parameters.copy() if parameters else {} - params['Bucket'] = self.bucket.name - params['Key'] = self._encode_name(name) - url = self.bucket.meta.client.generate_presigned_url('get_object', Params=params, - ExpiresIn=expire) + params["Bucket"] = self.bucket.name + params["Key"] = self._encode_name(name) + url = self.bucket.meta.client.generate_presigned_url( + "get_object", Params=params, ExpiresIn=expire + ) if self.querystring_auth: return url return self._strip_signing_parameters(url) From 388d615f254fb1617e16b5c9691d939735c76d47 Mon Sep 17 00:00:00 2001 From: trhr Date: Wed, 26 Feb 2020 20:42:18 -0600 Subject: [PATCH 4/7] All changes but unit tests --- evennia/contrib/aws-s3-cdn.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/evennia/contrib/aws-s3-cdn.py b/evennia/contrib/aws-s3-cdn.py index 41a6e54113..d09943b3b5 100644 --- a/evennia/contrib/aws-s3-cdn.py +++ b/evennia/contrib/aws-s3-cdn.py @@ -1,4 +1,7 @@ """ +AWS Storage System +The Right Honourable Reverend (trhr) 2020 + ABOUT THIS PLUGIN: This plugin migrates the Web-based portion of Evennia, namely images, @@ -139,7 +142,7 @@ UNINSTALLATION: If you haven't made changes to your static files (uploaded images, etc), you can simply remove the lines you added to secret_settings.py. If you -have made changes and want to install at a later date, you can export +have made changes and want to uninstall at a later date, you can export your files from your S3 bucket and put them in /static/ in the evennia directory. 
From 25ab0a8bbbeb1856b478e926107395ed7b60cf68 Mon Sep 17 00:00:00 2001 From: trhr Date: Fri, 20 Mar 2020 22:03:14 -0500 Subject: [PATCH 5/7] added tests --- evennia/contrib/tests.py | 716 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 716 insertions(+) diff --git a/evennia/contrib/tests.py b/evennia/contrib/tests.py index fa132f6264..e7932ac6ee 100644 --- a/evennia/contrib/tests.py +++ b/evennia/contrib/tests.py @@ -3265,3 +3265,719 @@ class TestBuildingMenu(CommandTest): self.assertEqual(self.char1.ndb._building_menu.obj, self.room1) self.call(CmdNoMatch(building_menu=self.menu), "q") self.assertEqual(self.exit.key, "in") + +import gzip +import pickle +import threading +import warnings +from datetime import datetime +from unittest import skipIf + +from botocore.exceptions import ClientError +from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import ContentFile +from django.test import TestCase +from django.utils.timezone import is_aware, utc + +from evennia.contrib import aws_s3_cdn as s3boto3 + +try: + from django.utils.six.moves.urllib import parse as urlparse +except ImportError: + from urllib import parse as urlparse + + +try: + from unittest import mock +except ImportError: # Python 3.2 and below + import mock + + +class S3Boto3TestCase(TestCase): + def setUp(self): + self.storage = s3boto3.S3Boto3Storage() + self.storage._connections.connection = mock.MagicMock() + + +class S3Boto3StorageTests(S3Boto3TestCase): + + def test_clean_name(self): + """ + Test the base case of _clean_name + """ + path = self.storage._clean_name("path/to/somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_clean_name_normalize(self): + """ + Test the normalization of _clean_name + """ + path = self.storage._clean_name("path/to/../somewhere") + self.assertEqual(path, "path/somewhere") + + def test_clean_name_trailing_slash(self): + """ + Test the _clean_name when the path has a trailing slash + """ + path = 
self.storage._clean_name("path/to/somewhere/") + self.assertEqual(path, "path/to/somewhere/") + + def test_clean_name_windows(self): + """ + Test the _clean_name when the path contains Windows-style backslashes + """ + path = self.storage._clean_name("path\\to\\somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_pickle_with_bucket(self): + """ + Test that the storage can be pickled with a bucket attached + """ + # Ensure the bucket has been used + self.storage.bucket + self.assertIsNotNone(self.storage._bucket) + + # Can't pickle MagicMock, but you can't pickle a real Bucket object either + p = pickle.dumps(self.storage) + new_storage = pickle.loads(p) + + self.assertIsInstance(new_storage._connections, threading.local) + # Put the mock connection back in + new_storage._connections.connection = mock.MagicMock() + + self.assertIsNone(new_storage._bucket) + new_storage.bucket + self.assertIsNotNone(new_storage._bucket) + + def test_pickle_without_bucket(self): + """ + Test that the storage can be pickled, without a bucket instance + """ + + # Can't pickle a threadlocal + p = pickle.dumps(self.storage) + new_storage = pickle.loads(p) + + self.assertIsInstance(new_storage._connections, threading.local) + + def test_storage_url_slashes(self): + """ + Test URL generation. + """ + self.storage.custom_domain = 'example.com' + + # We expect no leading slashes in the path, + # and trailing slashes should be preserved. 
+ self.assertEqual(self.storage.url(''), 'https://example.com/') + self.assertEqual(self.storage.url('path'), 'https://example.com/path') + self.assertEqual(self.storage.url('path/'), 'https://example.com/path/') + self.assertEqual(self.storage.url('path/1'), 'https://example.com/path/1') + self.assertEqual(self.storage.url('path/1/'), 'https://example.com/path/1/') + + def test_storage_save(self): + """ + Test saving a file + """ + name = 'test_storage_save.txt' + content = ContentFile('new content') + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'text/plain', + 'ACL': self.storage.default_acl, + } + ) + + def test_storage_save_with_acl(self): + """ + Test saving a file with user defined ACL. + """ + name = 'test_storage_save.txt' + content = ContentFile('new content') + self.storage.default_acl = 'private' + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'text/plain', + 'ACL': 'private', + } + ) + + def test_content_type(self): + """ + Test saving a file with a None content type. 
+ """ + name = 'test_image.jpg' + content = ContentFile('data') + content.content_type = None + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'image/jpeg', + 'ACL': self.storage.default_acl, + } + ) + + def test_storage_save_gzipped(self): + """ + Test saving a gzipped file + """ + name = 'test_storage_save.gz' + content = ContentFile("I am gzip'd") + self.storage.save(name, content) + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'application/octet-stream', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + + def test_storage_save_gzip(self): + """ + Test saving a file with gzip enabled. + """ + self.storage.gzip = True + name = 'test_storage_save.css' + content = ContentFile("I should be gzip'd") + self.storage.save(name, content) + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + mock.ANY, + ExtraArgs={ + 'ContentType': 'text/css', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + args, kwargs = obj.upload_fileobj.call_args + content = args[0] + zfile = gzip.GzipFile(mode='rb', fileobj=content) + self.assertEqual(zfile.read(), b"I should be gzip'd") + + def test_storage_save_gzip_twice(self): + """ + Test saving the same file content twice with gzip enabled. 
+ """ + # Given + self.storage.gzip = True + name = 'test_storage_save.css' + content = ContentFile("I should be gzip'd") + + # When + self.storage.save(name, content) + self.storage.save('test_storage_save_2.css', content) + + # Then + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + mock.ANY, + ExtraArgs={ + 'ContentType': 'text/css', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + args, kwargs = obj.upload_fileobj.call_args + content = args[0] + zfile = gzip.GzipFile(mode='rb', fileobj=content) + self.assertEqual(zfile.read(), b"I should be gzip'd") + + def test_compress_content_len(self): + """ + Test that file returned by _compress_content() is readable. + """ + self.storage.gzip = True + content = ContentFile("I should be gzip'd") + content = self.storage._compress_content(content) + self.assertTrue(len(content.read()) > 0) + + def test_storage_open_write(self): + """ + Test opening a file in write mode + """ + name = 'test_open_for_writïng.txt' + content = 'new content' + + # Set the encryption flag used for multipart uploads + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + # Set the name of the mock object + obj.key = name + + file.write(content) + obj.initiate_multipart_upload.assert_called_with( + ACL='public-read', + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + + # Save the internal file before closing + multipart = obj.initiate_multipart_upload.return_value + multipart.parts.all.return_value = [mock.MagicMock(e_tag='123', part_number=1)] + file.close() + multipart.Part.assert_called_with(1) + part = multipart.Part.return_value + part.upload.assert_called_with(Body=content.encode('utf-8')) + 
multipart.complete.assert_called_once_with( + MultipartUpload={'Parts': [{'ETag': '123', 'PartNumber': 1}]}) + + def test_storage_open_no_write(self): + """ + Test opening file in write mode and closing without writing. + + A file should be created as by obj.put(...). + """ + name = 'test_open_no_write.txt' + + # Set the encryption flag used for puts + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + obj.load.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + + # Set the name of the mock object + obj.key = name + + # Save the internal file before closing + file.close() + + obj.load.assert_called_once_with() + obj.put.assert_called_once_with( + ACL='public-read', + Body=b"", + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + + def test_storage_open_no_overwrite_existing(self): + """ + Test opening an existing file in write mode and closing without writing. 
+ """ + name = 'test_open_no_overwrite_existing.txt' + + # Set the encryption flag used for puts + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + + # Set the name of the mock object + obj.key = name + + # Save the internal file before closing + file.close() + + obj.load.assert_called_once_with() + obj.put.assert_not_called() + + def test_storage_write_beyond_buffer_size(self): + """ + Test writing content that exceeds the buffer size + """ + name = 'test_open_for_writïng_beyond_buffer_size.txt' + + # Set the encryption flag used for multipart uploads + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + # Set the name of the mock object + obj.key = name + + # Initiate the multipart upload + file.write('') + obj.initiate_multipart_upload.assert_called_with( + ACL='public-read', + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + multipart = obj.initiate_multipart_upload.return_value + + # Write content at least twice as long as the buffer size + written_content = '' + counter = 1 + while len(written_content) < 2 * file.buffer_size: + content = 'hello, aws {counter}\n'.format(counter=counter) + # Write more than just a few bytes in each iteration to keep the + # test reasonably fast + content += '*' * int(file.buffer_size / 10) + file.write(content) + written_content += content + counter += 1 + + # Save the internal file before closing + multipart.parts.all.return_value = [ + mock.MagicMock(e_tag='123', part_number=1), + mock.MagicMock(e_tag='456', part_number=2) + ] + file.close() + self.assertListEqual( 
+ multipart.Part.call_args_list, + [mock.call(1), mock.call(2)] + ) + part = multipart.Part.return_value + uploaded_content = ''.join( + args_list[1]['Body'].decode('utf-8') + for args_list in part.upload.call_args_list + ) + self.assertEqual(uploaded_content, written_content) + multipart.complete.assert_called_once_with( + MultipartUpload={'Parts': [ + {'ETag': '123', 'PartNumber': 1}, + {'ETag': '456', 'PartNumber': 2}, + ]} + ) + + def test_auto_creating_bucket(self): + self.storage.auto_create_bucket = True + Bucket = mock.MagicMock() + self.storage._connections.connection.Bucket.return_value = Bucket + self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' + + Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + self.storage._get_or_create_bucket('testbucketname') + Bucket.create.assert_called_once_with( + ACL='public-read', + CreateBucketConfiguration={ + 'LocationConstraint': 'sa-east-1', + } + ) + + def test_auto_creating_bucket_with_acl(self): + self.storage.auto_create_bucket = True + self.storage.bucket_acl = 'public-read' + Bucket = mock.MagicMock() + self.storage._connections.connection.Bucket.return_value = Bucket + self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' + + Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + self.storage._get_or_create_bucket('testbucketname') + Bucket.create.assert_called_once_with( + ACL='public-read', + CreateBucketConfiguration={ + 'LocationConstraint': 'sa-east-1', + } + ) + + def test_storage_exists(self): + self.assertTrue(self.storage.exists("file.txt")) + self.storage.connection.meta.client.head_object.assert_called_with( + Bucket=self.storage.bucket_name, + Key="file.txt", + ) + + def test_storage_exists_false(self): + self.storage.connection.meta.client.head_object.side_effect = 
ClientError( + {'Error': {'Code': '404', 'Message': 'Not Found'}}, + 'HeadObject', + ) + self.assertFalse(self.storage.exists("file.txt")) + self.storage.connection.meta.client.head_object.assert_called_with( + Bucket=self.storage.bucket_name, + Key='file.txt', + ) + + def test_storage_exists_doesnt_create_bucket(self): + with mock.patch.object(self.storage, '_get_or_create_bucket') as method: + self.storage.exists('file.txt') + self.assertFalse(method.called) + + def test_storage_delete(self): + self.storage.delete("path/to/file.txt") + self.storage.bucket.Object.assert_called_with('path/to/file.txt') + self.storage.bucket.Object.return_value.delete.assert_called_with() + + def test_storage_listdir_base(self): + # Files: + # some/path/1.txt + # 2.txt + # other/path/3.txt + # 4.txt + pages = [ + { + 'CommonPrefixes': [ + {'Prefix': 'some'}, + {'Prefix': 'other'}, + ], + 'Contents': [ + {'Key': '2.txt'}, + {'Key': '4.txt'}, + ], + }, + ] + + paginator = mock.MagicMock() + paginator.paginate.return_value = pages + self.storage._connections.connection.meta.client.get_paginator.return_value = paginator + + dirs, files = self.storage.listdir('') + paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='') + + self.assertEqual(dirs, ['some', 'other']) + self.assertEqual(files, ['2.txt', '4.txt']) + + def test_storage_listdir_subdir(self): + # Files: + # some/path/1.txt + # some/2.txt + pages = [ + { + 'CommonPrefixes': [ + {'Prefix': 'some/path'}, + ], + 'Contents': [ + {'Key': 'some/2.txt'}, + ], + }, + ] + + paginator = mock.MagicMock() + paginator.paginate.return_value = pages + self.storage._connections.connection.meta.client.get_paginator.return_value = paginator + + dirs, files = self.storage.listdir('some/') + paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='some/') + + self.assertEqual(dirs, ['path']) + self.assertEqual(files, ['2.txt']) + + def test_storage_size(self): + obj = self.storage.bucket.Object.return_value + 
obj.content_length = 4098 + + name = 'file.txt' + self.assertEqual(self.storage.size(name), obj.content_length) + + def test_storage_mtime(self): + # Test both USE_TZ cases + for use_tz in (True, False): + with self.settings(USE_TZ=use_tz): + self._test_storage_mtime(use_tz) + + def _test_storage_mtime(self, use_tz): + obj = self.storage.bucket.Object.return_value + obj.last_modified = datetime.now(utc) + + name = 'file.txt' + self.assertFalse( + is_aware(self.storage.modified_time(name)), + 'Naive datetime object expected from modified_time()' + ) + + self.assertIs( + settings.USE_TZ, + is_aware(self.storage.get_modified_time(name)), + '{} datetime object expected from get_modified_time() when USE_TZ={}'.format( + ('Naive', 'Aware')[settings.USE_TZ], + settings.USE_TZ + ) + ) + + def test_storage_url(self): + name = 'test_storage_size.txt' + url = 'http://aws.amazon.com/%s' % name + self.storage.bucket.meta.client.generate_presigned_url.return_value = url + self.storage.bucket.name = 'bucket' + self.assertEqual(self.storage.url(name), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=self.storage.querystring_expire, + HttpMethod=None, + ) + + custom_expire = 123 + + self.assertEqual(self.storage.url(name, expire=custom_expire), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=custom_expire, + HttpMethod=None, + ) + + custom_method = 'HEAD' + + self.assertEqual(self.storage.url(name, http_method=custom_method), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=self.storage.querystring_expire, + HttpMethod=custom_method, + ) + + def test_generated_url_is_encoded(self): + self.storage.custom_domain = 
"mock.cloudfront.net" + filename = "whacky & filename.mp4" + url = self.storage.url(filename) + parsed_url = urlparse.urlparse(url) + self.assertEqual(parsed_url.path, + "/whacky%20%26%20filename.mp4") + self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) + + def test_special_characters(self): + self.storage.custom_domain = "mock.cloudfront.net" + + name = "ãlöhâ.jpg" + content = ContentFile('new content') + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + url = self.storage.url(name) + parsed_url = urlparse.urlparse(url) + self.assertEqual(parsed_url.path, "/%C3%A3l%C3%B6h%C3%A2.jpg") + + def test_strip_signing_parameters(self): + expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' + self.assertEqual(self.storage._strip_signing_parameters( + '%s?X-Amz-Date=12345678&X-Amz-Signature=Signature' % expected), expected) + self.assertEqual(self.storage._strip_signing_parameters( + '%s?expires=12345678&signature=Signature' % expected), expected) + + @skipIf(threading is None, 'Test requires threading') + def test_connection_threading(self): + connections = [] + + def thread_storage_connection(): + connections.append(self.storage.connection) + + for x in range(2): + t = threading.Thread(target=thread_storage_connection) + t.start() + t.join() + + # Connection for each thread needs to be unique + self.assertIsNot(connections[0], connections[1]) + + def test_location_leading_slash(self): + msg = ( + "S3Boto3Storage.location cannot begin with a leading slash. " + "Found '/'. Use '' instead." 
+ ) + with self.assertRaises(ImproperlyConfigured, msg=msg): + s3boto3.S3Boto3Storage(location='/') + + def test_deprecated_acl(self): + with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: + s3boto3.S3Boto3Storage(acl='private') + assert len(w) == 1 + assert issubclass(w[-1].category, DeprecationWarning) + message = ( + "The acl argument of S3Boto3Storage is deprecated. Use argument " + "default_acl or setting AWS_DEFAULT_ACL instead. The acl argument " + "will be removed in version 1.10." + ) + assert str(w[-1].message) == message + + def test_deprecated_bucket(self): + with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: + s3boto3.S3Boto3Storage(bucket='django') + assert len(w) == 1 + assert issubclass(w[-1].category, DeprecationWarning) + message = ( + "The bucket argument of S3Boto3Storage is deprecated. Use argument " + "bucket_name or setting AWS_STORAGE_BUCKET_NAME instead. The bucket " + "argument will be removed in version 1.10." + ) + assert str(w[-1].message) == message + + def test_deprecated_default_acl(self): + with warnings.catch_warnings(record=True) as w: + s3boto3.S3Boto3Storage() + assert len(w) == 1 + message = ( + "The default behavior of S3Boto3Storage is insecure and will change " + "in django-storages 1.10. By default files and new buckets are saved " + "with an ACL of 'public-read' (globally publicly readable). Version 1.10 will " + "default to using the bucket's ACL. To opt into the new behavior set " + "AWS_DEFAULT_ACL = None, otherwise to silence this warning explicitly " + "set AWS_DEFAULT_ACL." 
+ ) + assert str(w[-1].message) == message + + def test_deprecated_autocreate_bucket(self): + with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: + s3boto3.S3Boto3Storage(auto_create_bucket=True) + assert len(w) == 1 + assert issubclass(w[-1].category, DeprecationWarning) + message = ( + "Automatic bucket creation will be removed in version 1.10. It encourages " + "using overly broad credentials with this library. Either create it before " + "manually or use one of a myriad of automatic configuration management tools. " + "Unset AWS_AUTO_CREATE_BUCKET (it defaults to False) to silence this warning." + ) + assert str(w[-1].message) == message + + def test_deprecated_default_acl_override_class_variable(self): + class MyStorage(s3boto3.S3Boto3Storage): + default_acl = "private" + + with warnings.catch_warnings(record=True) as w: + MyStorage() + assert len(w) == 0 + + def test_override_settings(self): + with override_settings(AWS_LOCATION='foo1'): + storage = s3boto3.S3Boto3Storage() + self.assertEqual(storage.location, 'foo1') + with override_settings(AWS_LOCATION='foo2'): + storage = s3boto3.S3Boto3Storage() + self.assertEqual(storage.location, 'foo2') + + def test_override_class_variable(self): + class MyStorage1(s3boto3.S3Boto3Storage): + location = 'foo1' + + storage = MyStorage1() + self.assertEqual(storage.location, 'foo1') + + class MyStorage2(s3boto3.S3Boto3Storage): + location = 'foo2' + + storage = MyStorage2() + self.assertEqual(storage.location, 'foo2') + + def test_override_init_argument(self): + storage = s3boto3.S3Boto3Storage(location='foo1') + self.assertEqual(storage.location, 'foo1') + storage = s3boto3.S3Boto3Storage(location='foo2') + self.assertEqual(storage.location, 'foo2') From e5372b9740251dbef4efce3172280cbb8b2d65d2 Mon Sep 17 00:00:00 2001 From: trhr Date: Fri, 20 Mar 2020 22:04:04 -0500 Subject: [PATCH 6/7] Renamed to underscores --- evennia/contrib/{aws-s3-cdn.py => aws_s3_cdn.py} | 0 1 file changed, 0 
insertions(+), 0 deletions(-) rename evennia/contrib/{aws-s3-cdn.py => aws_s3_cdn.py} (100%) diff --git a/evennia/contrib/aws-s3-cdn.py b/evennia/contrib/aws_s3_cdn.py similarity index 100% rename from evennia/contrib/aws-s3-cdn.py rename to evennia/contrib/aws_s3_cdn.py From 28e4219e760f838a830e025bce5d3990d59f9c53 Mon Sep 17 00:00:00 2001 From: trhr Date: Fri, 10 Apr 2020 00:20:59 -0500 Subject: [PATCH 7/7] Refactored and fixed tests --- __init__.py | 0 evennia/contrib/django/__init__.py | 3 + evennia/contrib/{ => django}/aws_s3_cdn.py | 9 +- evennia/contrib/django/tests.py | 635 ++++++++++++++++++ evennia/contrib/tests.py | 721 +-------------------- 5 files changed, 643 insertions(+), 725 deletions(-) create mode 100644 __init__.py create mode 100644 evennia/contrib/django/__init__.py rename evennia/contrib/{ => django}/aws_s3_cdn.py (99%) create mode 100644 evennia/contrib/django/tests.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/evennia/contrib/django/__init__.py b/evennia/contrib/django/__init__.py new file mode 100644 index 0000000000..02fe3ff90b --- /dev/null +++ b/evennia/contrib/django/__init__.py @@ -0,0 +1,3 @@ +""" +Intended to be a collecting folder for Django-specific contribs that do not have observable effects on players. 
+""" \ No newline at end of file diff --git a/evennia/contrib/aws_s3_cdn.py b/evennia/contrib/django/aws_s3_cdn.py similarity index 99% rename from evennia/contrib/aws_s3_cdn.py rename to evennia/contrib/django/aws_s3_cdn.py index d09943b3b5..048c1c4c8c 100644 --- a/evennia/contrib/aws_s3_cdn.py +++ b/evennia/contrib/django/aws_s3_cdn.py @@ -12,7 +12,7 @@ server may be sufficient for serving multimedia to a minimal number of users, the perfect use case for this plugin would be: 1) Servers supporting heavy web-based traffic (webclient, etc) -2) With a sizeable number of users +2) With a sizable number of users 3) Where the users are globally distributed 4) Where multimedia files are served to users as a part of gameplay @@ -131,7 +131,7 @@ AWS_S3_OBJECT_PARAMETERS = { 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', AWS_DEFAULT_ACL = 'public-read' AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % settings.AWS_BUCKET_NAME AWS_AUTO_CREATE_BUCKET = True -STATICFILES_STORAGE = 'evennia.contrib.aws-s3-cdn.S3Boto3Storage' +STATICFILES_STORAGE = 'evennia.contrib.django.aws_s3_cdn.S3Boto3Storage' <<< END OF SECRET_SETTINGS.PY COPY/PASTE @@ -580,15 +580,14 @@ class S3Boto3Storage(Storage): default_content_type = "application/octet-stream" # If config provided in init, signature_version and addressing_style settings/args are ignored. 
config = None - # used for looking up the access and secret key from env vars access_key_names = ["AWS_S3_ACCESS_KEY_ID", "AWS_ACCESS_KEY_ID"] secret_key_names = ["AWS_S3_SECRET_ACCESS_KEY", "AWS_SECRET_ACCESS_KEY"] security_token_names = ["AWS_SESSION_TOKEN", "AWS_SECURITY_TOKEN"] security_token = None - access_key = setting("AWS_S3_ACCESS_KEY_ID", setting("AWS_ACCESS_KEY_ID")) - secret_key = setting("AWS_S3_SECRET_ACCESS_KEY", setting("AWS_SECRET_ACCESS_KEY")) + access_key = setting("AWS_S3_ACCESS_KEY_ID", setting("AWS_ACCESS_KEY_ID", "")) + secret_key = setting("AWS_S3_SECRET_ACCESS_KEY", setting("AWS_SECRET_ACCESS_KEY", "")) file_overwrite = setting("AWS_S3_FILE_OVERWRITE", True) object_parameters = setting("AWS_S3_OBJECT_PARAMETERS", {}) bucket_name = setting("AWS_STORAGE_BUCKET_NAME") diff --git a/evennia/contrib/django/tests.py b/evennia/contrib/django/tests.py new file mode 100644 index 0000000000..584005cf68 --- /dev/null +++ b/evennia/contrib/django/tests.py @@ -0,0 +1,635 @@ +from unittest import skipIf +from django.test import override_settings +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import ContentFile +from django.test import TestCase +from django.utils.timezone import is_aware, utc + +import datetime, gzip, pickle, threading + +from botocore.exceptions import ClientError +from evennia.contrib.django import aws_s3_cdn as s3boto3 + +try: + from django.utils.six.moves.urllib import parse as urlparse +except ImportError: + from urllib import parse as urlparse + + +try: + from unittest import mock +except ImportError: # Python 3.2 and below + import mock + + +class S3Boto3TestCase(TestCase): + def setUp(self): + self.storage = s3boto3.S3Boto3Storage(access_key="foo", secret_key="bar") + self.storage._connections.connection = mock.MagicMock() + + +class S3Boto3StorageTests(S3Boto3TestCase): + + def test_clean_name(self): + """ + Test the base case of _clean_name + """ + path = 
self.storage._clean_name("path/to/somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_clean_name_normalize(self): + """ + Test the normalization of _clean_name + """ + path = self.storage._clean_name("path/to/../somewhere") + self.assertEqual(path, "path/somewhere") + + def test_clean_name_trailing_slash(self): + """ + Test the _clean_name when the path has a trailing slash + """ + path = self.storage._clean_name("path/to/somewhere/") + self.assertEqual(path, "path/to/somewhere/") + + def test_clean_name_windows(self): + """ + Test the _clean_name when the path has a trailing slash + """ + path = self.storage._clean_name("path\\to\\somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_pickle_with_bucket(self): + """ + Test that the storage can be pickled with a bucket attached + """ + # Ensure the bucket has been used + self.storage.bucket + self.assertIsNotNone(self.storage._bucket) + + # Can't pickle MagicMock, but you can't pickle a real Bucket object either + p = pickle.dumps(self.storage) + new_storage = pickle.loads(p) + + self.assertIsInstance(new_storage._connections, threading.local) + # Put the mock connection back in + new_storage._connections.connection = mock.MagicMock() + + self.assertIsNone(new_storage._bucket) + new_storage.bucket + self.assertIsNotNone(new_storage._bucket) + + def test_pickle_without_bucket(self): + """ + Test that the storage can be pickled, without a bucket instance + """ + + # Can't pickle a threadlocal + p = pickle.dumps(self.storage) + new_storage = pickle.loads(p) + + self.assertIsInstance(new_storage._connections, threading.local) + + def test_storage_url_slashes(self): + """ + Test URL generation. + """ + self.storage.custom_domain = 'example.com' + + # We expect no leading slashes in the path, + # and trailing slashes should be preserved. 
+ self.assertEqual(self.storage.url(''), 'https://example.com/') + self.assertEqual(self.storage.url('path'), 'https://example.com/path') + self.assertEqual(self.storage.url('path/'), 'https://example.com/path/') + self.assertEqual(self.storage.url('path/1'), 'https://example.com/path/1') + self.assertEqual(self.storage.url('path/1/'), 'https://example.com/path/1/') + + def test_storage_save(self): + """ + Test saving a file + """ + name = 'test_storage_save.txt' + content = ContentFile('new content') + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'text/plain', + 'ACL': self.storage.default_acl, + } + ) + + def test_storage_save_with_acl(self): + """ + Test saving a file with user defined ACL. + """ + name = 'test_storage_save.txt' + content = ContentFile('new content') + self.storage.default_acl = 'private' + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'text/plain', + 'ACL': 'private', + } + ) + + def test_content_type(self): + """ + Test saving a file with a None content type. 
+ """ + name = 'test_image.jpg' + content = ContentFile('data') + content.content_type = None + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'image/jpeg', + 'ACL': self.storage.default_acl, + } + ) + + def test_storage_save_gzipped(self): + """ + Test saving a gzipped file + """ + name = 'test_storage_save.gz' + content = ContentFile("I am gzip'd") + self.storage.save(name, content) + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'application/octet-stream', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + + def test_storage_save_gzip(self): + """ + Test saving a file with gzip enabled. + """ + self.storage.gzip = True + name = 'test_storage_save.css' + content = ContentFile("I should be gzip'd") + self.storage.save(name, content) + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + mock.ANY, + ExtraArgs={ + 'ContentType': 'text/css', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + args, kwargs = obj.upload_fileobj.call_args + content = args[0] + zfile = gzip.GzipFile(mode='rb', fileobj=content) + self.assertEqual(zfile.read(), b"I should be gzip'd") + + def test_storage_save_gzip_twice(self): + """ + Test saving the same file content twice with gzip enabled. 
+ """ + # Given + self.storage.gzip = True + name = 'test_storage_save.css' + content = ContentFile("I should be gzip'd") + + # When + self.storage.save(name, content) + self.storage.save('test_storage_save_2.css', content) + + # Then + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + mock.ANY, + ExtraArgs={ + 'ContentType': 'text/css', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + args, kwargs = obj.upload_fileobj.call_args + content = args[0] + zfile = gzip.GzipFile(mode='rb', fileobj=content) + self.assertEqual(zfile.read(), b"I should be gzip'd") + + def test_compress_content_len(self): + """ + Test that file returned by _compress_content() is readable. + """ + self.storage.gzip = True + content = ContentFile("I should be gzip'd") + content = self.storage._compress_content(content) + self.assertTrue(len(content.read()) > 0) + + def test_storage_open_write(self): + """ + Test opening a file in write mode + """ + name = 'test_open_for_writïng.txt' + content = 'new content' + + # Set the encryption flag used for multipart uploads + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + # Set the name of the mock object + obj.key = name + + file.write(content) + obj.initiate_multipart_upload.assert_called_with( + ACL='public-read', + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + + # Save the internal file before closing + multipart = obj.initiate_multipart_upload.return_value + multipart.parts.all.return_value = [mock.MagicMock(e_tag='123', part_number=1)] + file.close() + multipart.Part.assert_called_with(1) + part = multipart.Part.return_value + part.upload.assert_called_with(Body=content.encode('utf-8')) + 
multipart.complete.assert_called_once_with( + MultipartUpload={'Parts': [{'ETag': '123', 'PartNumber': 1}]}) + + def test_storage_open_no_write(self): + """ + Test opening file in write mode and closing without writing. + + A file should be created as by obj.put(...). + """ + name = 'test_open_no_write.txt' + + # Set the encryption flag used for puts + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + obj.load.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + + # Set the name of the mock object + obj.key = name + + # Save the internal file before closing + file.close() + + obj.load.assert_called_once_with() + obj.put.assert_called_once_with( + ACL='public-read', + Body=b"", + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + + def test_storage_open_no_overwrite_existing(self): + """ + Test opening an existing file in write mode and closing without writing. 
+ """ + name = 'test_open_no_overwrite_existing.txt' + + # Set the encryption flag used for puts + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + + # Set the name of the mock object + obj.key = name + + # Save the internal file before closing + file.close() + + obj.load.assert_called_once_with() + obj.put.assert_not_called() + + def test_storage_write_beyond_buffer_size(self): + """ + Test writing content that exceeds the buffer size + """ + name = 'test_open_for_writïng_beyond_buffer_size.txt' + + # Set the encryption flag used for multipart uploads + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + # Set the name of the mock object + obj.key = name + + # Initiate the multipart upload + file.write('') + obj.initiate_multipart_upload.assert_called_with( + ACL='public-read', + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + multipart = obj.initiate_multipart_upload.return_value + + # Write content at least twice as long as the buffer size + written_content = '' + counter = 1 + while len(written_content) < 2 * file.buffer_size: + content = 'hello, aws {counter}\n'.format(counter=counter) + # Write more than just a few bytes in each iteration to keep the + # test reasonably fast + content += '*' * int(file.buffer_size / 10) + file.write(content) + written_content += content + counter += 1 + + # Save the internal file before closing + multipart.parts.all.return_value = [ + mock.MagicMock(e_tag='123', part_number=1), + mock.MagicMock(e_tag='456', part_number=2) + ] + file.close() + self.assertListEqual( 
+ multipart.Part.call_args_list, + [mock.call(1), mock.call(2)] + ) + part = multipart.Part.return_value + uploaded_content = ''.join( + args_list[1]['Body'].decode('utf-8') + for args_list in part.upload.call_args_list + ) + self.assertEqual(uploaded_content, written_content) + multipart.complete.assert_called_once_with( + MultipartUpload={'Parts': [ + {'ETag': '123', 'PartNumber': 1}, + {'ETag': '456', 'PartNumber': 2}, + ]} + ) + + def test_auto_creating_bucket(self): + self.storage.auto_create_bucket = True + Bucket = mock.MagicMock() + self.storage._connections.connection.Bucket.return_value = Bucket + self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' + + Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + self.storage._get_or_create_bucket('testbucketname') + Bucket.create.assert_called_once_with( + ACL='public-read', + CreateBucketConfiguration={ + 'LocationConstraint': 'sa-east-1', + } + ) + + def test_auto_creating_bucket_with_acl(self): + self.storage.auto_create_bucket = True + self.storage.bucket_acl = 'public-read' + Bucket = mock.MagicMock() + self.storage._connections.connection.Bucket.return_value = Bucket + self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' + + Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + self.storage._get_or_create_bucket('testbucketname') + Bucket.create.assert_called_once_with( + ACL='public-read', + CreateBucketConfiguration={ + 'LocationConstraint': 'sa-east-1', + } + ) + + def test_storage_exists(self): + self.assertTrue(self.storage.exists("file.txt")) + self.storage.connection.meta.client.head_object.assert_called_with( + Bucket=self.storage.bucket_name, + Key="file.txt", + ) + + def test_storage_exists_false(self): + self.storage.connection.meta.client.head_object.side_effect = 
ClientError( + {'Error': {'Code': '404', 'Message': 'Not Found'}}, + 'HeadObject', + ) + self.assertFalse(self.storage.exists("file.txt")) + self.storage.connection.meta.client.head_object.assert_called_with( + Bucket=self.storage.bucket_name, + Key='file.txt', + ) + + def test_storage_exists_doesnt_create_bucket(self): + with mock.patch.object(self.storage, '_get_or_create_bucket') as method: + self.storage.exists('file.txt') + self.assertFalse(method.called) + + def test_storage_delete(self): + self.storage.delete("path/to/file.txt") + self.storage.bucket.Object.assert_called_with('path/to/file.txt') + self.storage.bucket.Object.return_value.delete.assert_called_with() + + def test_storage_listdir_base(self): + # Files: + # some/path/1.txt + # 2.txt + # other/path/3.txt + # 4.txt + pages = [ + { + 'CommonPrefixes': [ + {'Prefix': 'some'}, + {'Prefix': 'other'}, + ], + 'Contents': [ + {'Key': '2.txt'}, + {'Key': '4.txt'}, + ], + }, + ] + + paginator = mock.MagicMock() + paginator.paginate.return_value = pages + self.storage._connections.connection.meta.client.get_paginator.return_value = paginator + + dirs, files = self.storage.listdir('') + paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='') + + self.assertEqual(dirs, ['some', 'other']) + self.assertEqual(files, ['2.txt', '4.txt']) + + def test_storage_listdir_subdir(self): + # Files: + # some/path/1.txt + # some/2.txt + pages = [ + { + 'CommonPrefixes': [ + {'Prefix': 'some/path'}, + ], + 'Contents': [ + {'Key': 'some/2.txt'}, + ], + }, + ] + + paginator = mock.MagicMock() + paginator.paginate.return_value = pages + self.storage._connections.connection.meta.client.get_paginator.return_value = paginator + + dirs, files = self.storage.listdir('some/') + paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='some/') + + self.assertEqual(dirs, ['path']) + self.assertEqual(files, ['2.txt']) + + def test_storage_size(self): + obj = self.storage.bucket.Object.return_value + 
obj.content_length = 4098 + + name = 'file.txt' + self.assertEqual(self.storage.size(name), obj.content_length) + + def test_storage_mtime(self): + # Test both USE_TZ cases + for use_tz in (True, False): + with self.settings(USE_TZ=use_tz): + self._test_storage_mtime(use_tz) + + def _test_storage_mtime(self, use_tz): + obj = self.storage.bucket.Object.return_value + obj.last_modified = datetime.datetime.now(utc) + + name = 'file.txt' + self.assertFalse( + is_aware(self.storage.modified_time(name)), + 'Naive datetime object expected from modified_time()' + ) + + self.assertIs( + settings.USE_TZ, + is_aware(self.storage.get_modified_time(name)), + '{} datetime object expected from get_modified_time() when USE_TZ={}'.format( + ('Naive', 'Aware')[settings.USE_TZ], + settings.USE_TZ + ) + ) + + def test_storage_url(self): + name = 'test_storage_size.txt' + url = 'http://aws.amazon.com/%s' % name + self.storage.bucket.meta.client.generate_presigned_url.return_value = url + self.storage.bucket.name = 'bucket' + self.assertEqual(self.storage.url(name), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=self.storage.querystring_expire, + ) + + custom_expire = 123 + + self.assertEqual(self.storage.url(name, expire=custom_expire), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=custom_expire, + ) + + + def test_generated_url_is_encoded(self): + self.storage.custom_domain = "mock.cloudfront.net" + filename = "whacky & filename.mp4" + url = self.storage.url(filename) + parsed_url = urlparse.urlparse(url) + self.assertEqual(parsed_url.path, + "/whacky%20%26%20filename.mp4") + self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) + + def test_special_characters(self): + self.storage.custom_domain = 
"mock.cloudfront.net" + + name = "ãlöhâ.jpg" + content = ContentFile('new content') + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + url = self.storage.url(name) + parsed_url = urlparse.urlparse(url) + self.assertEqual(parsed_url.path, "/%C3%A3l%C3%B6h%C3%A2.jpg") + + def test_strip_signing_parameters(self): + expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' + self.assertEqual(self.storage._strip_signing_parameters( + '%s?X-Amz-Date=12345678&X-Amz-Signature=Signature' % expected), expected) + self.assertEqual(self.storage._strip_signing_parameters( + '%s?expires=12345678&signature=Signature' % expected), expected) + + @skipIf(threading is None, 'Test requires threading') + def test_connection_threading(self): + connections = [] + + def thread_storage_connection(): + connections.append(self.storage.connection) + + for x in range(2): + t = threading.Thread(target=thread_storage_connection) + t.start() + t.join() + + # Connection for each thread needs to be unique + self.assertIsNot(connections[0], connections[1]) + + def test_location_leading_slash(self): + msg = ( + "S3Boto3Storage.location cannot begin with a leading slash. " + "Found '/'. Use '' instead." 
+ ) + with self.assertRaises(ImproperlyConfigured, msg=msg): + s3boto3.S3Boto3Storage(location='/') + + def test_override_class_variable(self): + class MyStorage1(s3boto3.S3Boto3Storage): + location = 'foo1' + + storage = MyStorage1() + self.assertEqual(storage.location, 'foo1') + + class MyStorage2(s3boto3.S3Boto3Storage): + location = 'foo2' + + storage = MyStorage2() + self.assertEqual(storage.location, 'foo2') + + def test_override_init_argument(self): + storage = s3boto3.S3Boto3Storage(location='foo1') + self.assertEqual(storage.location, 'foo1') + storage = s3boto3.S3Boto3Storage(location='foo2') + self.assertEqual(storage.location, 'foo2') diff --git a/evennia/contrib/tests.py b/evennia/contrib/tests.py index e7932ac6ee..2bd3158a11 100644 --- a/evennia/contrib/tests.py +++ b/evennia/contrib/tests.py @@ -4,7 +4,6 @@ Testing suite for contrib folder """ -import sys import datetime from django.test import override_settings from evennia.commands.default.tests import CommandTest @@ -237,7 +236,6 @@ class TestRPSystem(EvenniaTest): from django.conf import settings from evennia.contrib import extended_room -from evennia import gametime from evennia.objects.objects import DefaultRoom @@ -2128,7 +2126,6 @@ class TestRandomStringGenerator(EvenniaTest): import itertools from evennia.contrib import puzzles from evennia.utils import search -from evennia.utils.utils import inherits_from class TestPuzzles(CommandTest): @@ -3093,7 +3090,7 @@ class TestPuzzles(CommandTest): # Tests for the building_menu contrib -from evennia.contrib.building_menu import BuildingMenu, CmdNoInput, CmdNoMatch +from evennia.contrib.building_menu import BuildingMenu, CmdNoMatch class Submenu(BuildingMenu): @@ -3265,719 +3262,3 @@ class TestBuildingMenu(CommandTest): self.assertEqual(self.char1.ndb._building_menu.obj, self.room1) self.call(CmdNoMatch(building_menu=self.menu), "q") self.assertEqual(self.exit.key, "in") - -import gzip -import pickle -import threading -import warnings -from datetime 
import datetime -from unittest import skipIf - -from botocore.exceptions import ClientError -from django.core.exceptions import ImproperlyConfigured -from django.core.files.base import ContentFile -from django.test import TestCase -from django.utils.timezone import is_aware, utc - -from evennia.contrib import aws_s3_cdn as s3boto3 - -try: - from django.utils.six.moves.urllib import parse as urlparse -except ImportError: - from urllib import parse as urlparse - - -try: - from unittest import mock -except ImportError: # Python 3.2 and below - import mock - - -class S3Boto3TestCase(TestCase): - def setUp(self): - self.storage = s3boto3.S3Boto3Storage() - self.storage._connections.connection = mock.MagicMock() - - -class S3Boto3StorageTests(S3Boto3TestCase): - - def test_clean_name(self): - """ - Test the base case of _clean_name - """ - path = self.storage._clean_name("path/to/somewhere") - self.assertEqual(path, "path/to/somewhere") - - def test_clean_name_normalize(self): - """ - Test the normalization of _clean_name - """ - path = self.storage._clean_name("path/to/../somewhere") - self.assertEqual(path, "path/somewhere") - - def test_clean_name_trailing_slash(self): - """ - Test the _clean_name when the path has a trailing slash - """ - path = self.storage._clean_name("path/to/somewhere/") - self.assertEqual(path, "path/to/somewhere/") - - def test_clean_name_windows(self): - """ - Test the _clean_name when the path has a trailing slash - """ - path = self.storage._clean_name("path\\to\\somewhere") - self.assertEqual(path, "path/to/somewhere") - - def test_pickle_with_bucket(self): - """ - Test that the storage can be pickled with a bucket attached - """ - # Ensure the bucket has been used - self.storage.bucket - self.assertIsNotNone(self.storage._bucket) - - # Can't pickle MagicMock, but you can't pickle a real Bucket object either - p = pickle.dumps(self.storage) - new_storage = pickle.loads(p) - - self.assertIsInstance(new_storage._connections, threading.local) 
- # Put the mock connection back in - new_storage._connections.connection = mock.MagicMock() - - self.assertIsNone(new_storage._bucket) - new_storage.bucket - self.assertIsNotNone(new_storage._bucket) - - def test_pickle_without_bucket(self): - """ - Test that the storage can be pickled, without a bucket instance - """ - - # Can't pickle a threadlocal - p = pickle.dumps(self.storage) - new_storage = pickle.loads(p) - - self.assertIsInstance(new_storage._connections, threading.local) - - def test_storage_url_slashes(self): - """ - Test URL generation. - """ - self.storage.custom_domain = 'example.com' - - # We expect no leading slashes in the path, - # and trailing slashes should be preserved. - self.assertEqual(self.storage.url(''), 'https://example.com/') - self.assertEqual(self.storage.url('path'), 'https://example.com/path') - self.assertEqual(self.storage.url('path/'), 'https://example.com/path/') - self.assertEqual(self.storage.url('path/1'), 'https://example.com/path/1') - self.assertEqual(self.storage.url('path/1/'), 'https://example.com/path/1/') - - def test_storage_save(self): - """ - Test saving a file - """ - name = 'test_storage_save.txt' - content = ContentFile('new content') - self.storage.save(name, content) - self.storage.bucket.Object.assert_called_once_with(name) - - obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - content, - ExtraArgs={ - 'ContentType': 'text/plain', - 'ACL': self.storage.default_acl, - } - ) - - def test_storage_save_with_acl(self): - """ - Test saving a file with user defined ACL. 
- """ - name = 'test_storage_save.txt' - content = ContentFile('new content') - self.storage.default_acl = 'private' - self.storage.save(name, content) - self.storage.bucket.Object.assert_called_once_with(name) - - obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - content, - ExtraArgs={ - 'ContentType': 'text/plain', - 'ACL': 'private', - } - ) - - def test_content_type(self): - """ - Test saving a file with a None content type. - """ - name = 'test_image.jpg' - content = ContentFile('data') - content.content_type = None - self.storage.save(name, content) - self.storage.bucket.Object.assert_called_once_with(name) - - obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - content, - ExtraArgs={ - 'ContentType': 'image/jpeg', - 'ACL': self.storage.default_acl, - } - ) - - def test_storage_save_gzipped(self): - """ - Test saving a gzipped file - """ - name = 'test_storage_save.gz' - content = ContentFile("I am gzip'd") - self.storage.save(name, content) - obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - content, - ExtraArgs={ - 'ContentType': 'application/octet-stream', - 'ContentEncoding': 'gzip', - 'ACL': self.storage.default_acl, - } - ) - - def test_storage_save_gzip(self): - """ - Test saving a file with gzip enabled. 
- """ - self.storage.gzip = True - name = 'test_storage_save.css' - content = ContentFile("I should be gzip'd") - self.storage.save(name, content) - obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - mock.ANY, - ExtraArgs={ - 'ContentType': 'text/css', - 'ContentEncoding': 'gzip', - 'ACL': self.storage.default_acl, - } - ) - args, kwargs = obj.upload_fileobj.call_args - content = args[0] - zfile = gzip.GzipFile(mode='rb', fileobj=content) - self.assertEqual(zfile.read(), b"I should be gzip'd") - - def test_storage_save_gzip_twice(self): - """ - Test saving the same file content twice with gzip enabled. - """ - # Given - self.storage.gzip = True - name = 'test_storage_save.css' - content = ContentFile("I should be gzip'd") - - # When - self.storage.save(name, content) - self.storage.save('test_storage_save_2.css', content) - - # Then - obj = self.storage.bucket.Object.return_value - obj.upload_fileobj.assert_called_with( - mock.ANY, - ExtraArgs={ - 'ContentType': 'text/css', - 'ContentEncoding': 'gzip', - 'ACL': self.storage.default_acl, - } - ) - args, kwargs = obj.upload_fileobj.call_args - content = args[0] - zfile = gzip.GzipFile(mode='rb', fileobj=content) - self.assertEqual(zfile.read(), b"I should be gzip'd") - - def test_compress_content_len(self): - """ - Test that file returned by _compress_content() is readable. 
- """ - self.storage.gzip = True - content = ContentFile("I should be gzip'd") - content = self.storage._compress_content(content) - self.assertTrue(len(content.read()) > 0) - - def test_storage_open_write(self): - """ - Test opening a file in write mode - """ - name = 'test_open_for_writïng.txt' - content = 'new content' - - # Set the encryption flag used for multipart uploads - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' - - file = self.storage.open(name, 'w') - self.storage.bucket.Object.assert_called_with(name) - obj = self.storage.bucket.Object.return_value - # Set the name of the mock object - obj.key = name - - file.write(content) - obj.initiate_multipart_upload.assert_called_with( - ACL='public-read', - ContentType='text/plain', - ServerSideEncryption='AES256', - StorageClass='REDUCED_REDUNDANCY' - ) - - # Save the internal file before closing - multipart = obj.initiate_multipart_upload.return_value - multipart.parts.all.return_value = [mock.MagicMock(e_tag='123', part_number=1)] - file.close() - multipart.Part.assert_called_with(1) - part = multipart.Part.return_value - part.upload.assert_called_with(Body=content.encode('utf-8')) - multipart.complete.assert_called_once_with( - MultipartUpload={'Parts': [{'ETag': '123', 'PartNumber': 1}]}) - - def test_storage_open_no_write(self): - """ - Test opening file in write mode and closing without writing. - - A file should be created as by obj.put(...). 
- """ - name = 'test_open_no_write.txt' - - # Set the encryption flag used for puts - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' - - file = self.storage.open(name, 'w') - self.storage.bucket.Object.assert_called_with(name) - obj = self.storage.bucket.Object.return_value - obj.load.side_effect = ClientError({'Error': {}, - 'ResponseMetadata': {'HTTPStatusCode': 404}}, - 'head_bucket') - - # Set the name of the mock object - obj.key = name - - # Save the internal file before closing - file.close() - - obj.load.assert_called_once_with() - obj.put.assert_called_once_with( - ACL='public-read', - Body=b"", - ContentType='text/plain', - ServerSideEncryption='AES256', - StorageClass='REDUCED_REDUNDANCY' - ) - - def test_storage_open_no_overwrite_existing(self): - """ - Test opening an existing file in write mode and closing without writing. - """ - name = 'test_open_no_overwrite_existing.txt' - - # Set the encryption flag used for puts - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' - - file = self.storage.open(name, 'w') - self.storage.bucket.Object.assert_called_with(name) - obj = self.storage.bucket.Object.return_value - - # Set the name of the mock object - obj.key = name - - # Save the internal file before closing - file.close() - - obj.load.assert_called_once_with() - obj.put.assert_not_called() - - def test_storage_write_beyond_buffer_size(self): - """ - Test writing content that exceeds the buffer size - """ - name = 'test_open_for_writïng_beyond_buffer_size.txt' - - # Set the encryption flag used for multipart uploads - self.storage.encryption = True - self.storage.reduced_redundancy = True - self.storage.default_acl = 'public-read' - - file = self.storage.open(name, 'w') - self.storage.bucket.Object.assert_called_with(name) - obj = self.storage.bucket.Object.return_value - # Set the name of the mock object - obj.key = name - 
- # Initiate the multipart upload - file.write('') - obj.initiate_multipart_upload.assert_called_with( - ACL='public-read', - ContentType='text/plain', - ServerSideEncryption='AES256', - StorageClass='REDUCED_REDUNDANCY' - ) - multipart = obj.initiate_multipart_upload.return_value - - # Write content at least twice as long as the buffer size - written_content = '' - counter = 1 - while len(written_content) < 2 * file.buffer_size: - content = 'hello, aws {counter}\n'.format(counter=counter) - # Write more than just a few bytes in each iteration to keep the - # test reasonably fast - content += '*' * int(file.buffer_size / 10) - file.write(content) - written_content += content - counter += 1 - - # Save the internal file before closing - multipart.parts.all.return_value = [ - mock.MagicMock(e_tag='123', part_number=1), - mock.MagicMock(e_tag='456', part_number=2) - ] - file.close() - self.assertListEqual( - multipart.Part.call_args_list, - [mock.call(1), mock.call(2)] - ) - part = multipart.Part.return_value - uploaded_content = ''.join( - args_list[1]['Body'].decode('utf-8') - for args_list in part.upload.call_args_list - ) - self.assertEqual(uploaded_content, written_content) - multipart.complete.assert_called_once_with( - MultipartUpload={'Parts': [ - {'ETag': '123', 'PartNumber': 1}, - {'ETag': '456', 'PartNumber': 2}, - ]} - ) - - def test_auto_creating_bucket(self): - self.storage.auto_create_bucket = True - Bucket = mock.MagicMock() - self.storage._connections.connection.Bucket.return_value = Bucket - self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' - - Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, - 'ResponseMetadata': {'HTTPStatusCode': 404}}, - 'head_bucket') - self.storage._get_or_create_bucket('testbucketname') - Bucket.create.assert_called_once_with( - ACL='public-read', - CreateBucketConfiguration={ - 'LocationConstraint': 'sa-east-1', - } - ) - - def test_auto_creating_bucket_with_acl(self): - 
self.storage.auto_create_bucket = True - self.storage.bucket_acl = 'public-read' - Bucket = mock.MagicMock() - self.storage._connections.connection.Bucket.return_value = Bucket - self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' - - Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, - 'ResponseMetadata': {'HTTPStatusCode': 404}}, - 'head_bucket') - self.storage._get_or_create_bucket('testbucketname') - Bucket.create.assert_called_once_with( - ACL='public-read', - CreateBucketConfiguration={ - 'LocationConstraint': 'sa-east-1', - } - ) - - def test_storage_exists(self): - self.assertTrue(self.storage.exists("file.txt")) - self.storage.connection.meta.client.head_object.assert_called_with( - Bucket=self.storage.bucket_name, - Key="file.txt", - ) - - def test_storage_exists_false(self): - self.storage.connection.meta.client.head_object.side_effect = ClientError( - {'Error': {'Code': '404', 'Message': 'Not Found'}}, - 'HeadObject', - ) - self.assertFalse(self.storage.exists("file.txt")) - self.storage.connection.meta.client.head_object.assert_called_with( - Bucket=self.storage.bucket_name, - Key='file.txt', - ) - - def test_storage_exists_doesnt_create_bucket(self): - with mock.patch.object(self.storage, '_get_or_create_bucket') as method: - self.storage.exists('file.txt') - self.assertFalse(method.called) - - def test_storage_delete(self): - self.storage.delete("path/to/file.txt") - self.storage.bucket.Object.assert_called_with('path/to/file.txt') - self.storage.bucket.Object.return_value.delete.assert_called_with() - - def test_storage_listdir_base(self): - # Files: - # some/path/1.txt - # 2.txt - # other/path/3.txt - # 4.txt - pages = [ - { - 'CommonPrefixes': [ - {'Prefix': 'some'}, - {'Prefix': 'other'}, - ], - 'Contents': [ - {'Key': '2.txt'}, - {'Key': '4.txt'}, - ], - }, - ] - - paginator = mock.MagicMock() - paginator.paginate.return_value = pages - 
self.storage._connections.connection.meta.client.get_paginator.return_value = paginator - - dirs, files = self.storage.listdir('') - paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='') - - self.assertEqual(dirs, ['some', 'other']) - self.assertEqual(files, ['2.txt', '4.txt']) - - def test_storage_listdir_subdir(self): - # Files: - # some/path/1.txt - # some/2.txt - pages = [ - { - 'CommonPrefixes': [ - {'Prefix': 'some/path'}, - ], - 'Contents': [ - {'Key': 'some/2.txt'}, - ], - }, - ] - - paginator = mock.MagicMock() - paginator.paginate.return_value = pages - self.storage._connections.connection.meta.client.get_paginator.return_value = paginator - - dirs, files = self.storage.listdir('some/') - paginator.paginate.assert_called_with(Bucket=None, Delimiter='/', Prefix='some/') - - self.assertEqual(dirs, ['path']) - self.assertEqual(files, ['2.txt']) - - def test_storage_size(self): - obj = self.storage.bucket.Object.return_value - obj.content_length = 4098 - - name = 'file.txt' - self.assertEqual(self.storage.size(name), obj.content_length) - - def test_storage_mtime(self): - # Test both USE_TZ cases - for use_tz in (True, False): - with self.settings(USE_TZ=use_tz): - self._test_storage_mtime(use_tz) - - def _test_storage_mtime(self, use_tz): - obj = self.storage.bucket.Object.return_value - obj.last_modified = datetime.now(utc) - - name = 'file.txt' - self.assertFalse( - is_aware(self.storage.modified_time(name)), - 'Naive datetime object expected from modified_time()' - ) - - self.assertIs( - settings.USE_TZ, - is_aware(self.storage.get_modified_time(name)), - '{} datetime object expected from get_modified_time() when USE_TZ={}'.format( - ('Naive', 'Aware')[settings.USE_TZ], - settings.USE_TZ - ) - ) - - def test_storage_url(self): - name = 'test_storage_size.txt' - url = 'http://aws.amazon.com/%s' % name - self.storage.bucket.meta.client.generate_presigned_url.return_value = url - self.storage.bucket.name = 'bucket' - 
self.assertEqual(self.storage.url(name), url) - self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( - 'get_object', - Params={'Bucket': self.storage.bucket.name, 'Key': name}, - ExpiresIn=self.storage.querystring_expire, - HttpMethod=None, - ) - - custom_expire = 123 - - self.assertEqual(self.storage.url(name, expire=custom_expire), url) - self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( - 'get_object', - Params={'Bucket': self.storage.bucket.name, 'Key': name}, - ExpiresIn=custom_expire, - HttpMethod=None, - ) - - custom_method = 'HEAD' - - self.assertEqual(self.storage.url(name, http_method=custom_method), url) - self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( - 'get_object', - Params={'Bucket': self.storage.bucket.name, 'Key': name}, - ExpiresIn=self.storage.querystring_expire, - HttpMethod=custom_method, - ) - - def test_generated_url_is_encoded(self): - self.storage.custom_domain = "mock.cloudfront.net" - filename = "whacky & filename.mp4" - url = self.storage.url(filename) - parsed_url = urlparse.urlparse(url) - self.assertEqual(parsed_url.path, - "/whacky%20%26%20filename.mp4") - self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) - - def test_special_characters(self): - self.storage.custom_domain = "mock.cloudfront.net" - - name = "ãlöhâ.jpg" - content = ContentFile('new content') - self.storage.save(name, content) - self.storage.bucket.Object.assert_called_once_with(name) - - url = self.storage.url(name) - parsed_url = urlparse.urlparse(url) - self.assertEqual(parsed_url.path, "/%C3%A3l%C3%B6h%C3%A2.jpg") - - def test_strip_signing_parameters(self): - expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' - self.assertEqual(self.storage._strip_signing_parameters( - '%s?X-Amz-Date=12345678&X-Amz-Signature=Signature' % expected), expected) - self.assertEqual(self.storage._strip_signing_parameters( - '%s?expires=12345678&signature=Signature' % 
expected), expected) - - @skipIf(threading is None, 'Test requires threading') - def test_connection_threading(self): - connections = [] - - def thread_storage_connection(): - connections.append(self.storage.connection) - - for x in range(2): - t = threading.Thread(target=thread_storage_connection) - t.start() - t.join() - - # Connection for each thread needs to be unique - self.assertIsNot(connections[0], connections[1]) - - def test_location_leading_slash(self): - msg = ( - "S3Boto3Storage.location cannot begin with a leading slash. " - "Found '/'. Use '' instead." - ) - with self.assertRaises(ImproperlyConfigured, msg=msg): - s3boto3.S3Boto3Storage(location='/') - - def test_deprecated_acl(self): - with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage(acl='private') - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - message = ( - "The acl argument of S3Boto3Storage is deprecated. Use argument " - "default_acl or setting AWS_DEFAULT_ACL instead. The acl argument " - "will be removed in version 1.10." - ) - assert str(w[-1].message) == message - - def test_deprecated_bucket(self): - with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage(bucket='django') - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - message = ( - "The bucket argument of S3Boto3Storage is deprecated. Use argument " - "bucket_name or setting AWS_STORAGE_BUCKET_NAME instead. The bucket " - "argument will be removed in version 1.10." - ) - assert str(w[-1].message) == message - - def test_deprecated_default_acl(self): - with warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage() - assert len(w) == 1 - message = ( - "The default behavior of S3Boto3Storage is insecure and will change " - "in django-storages 1.10. 
By default files and new buckets are saved " - "with an ACL of 'public-read' (globally publicly readable). Version 1.10 will " - "default to using the bucket's ACL. To opt into the new behavior set " - "AWS_DEFAULT_ACL = None, otherwise to silence this warning explicitly " - "set AWS_DEFAULT_ACL." - ) - assert str(w[-1].message) == message - - def test_deprecated_autocreate_bucket(self): - with override_settings(AWS_DEFAULT_ACL=None), warnings.catch_warnings(record=True) as w: - s3boto3.S3Boto3Storage(auto_create_bucket=True) - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - message = ( - "Automatic bucket creation will be removed in version 1.10. It encourages " - "using overly broad credentials with this library. Either create it before " - "manually or use one of a myriad of automatic configuration management tools. " - "Unset AWS_AUTO_CREATE_BUCKET (it defaults to False) to silence this warning." - ) - assert str(w[-1].message) == message - - def test_deprecated_default_acl_override_class_variable(self): - class MyStorage(s3boto3.S3Boto3Storage): - default_acl = "private" - - with warnings.catch_warnings(record=True) as w: - MyStorage() - assert len(w) == 0 - - def test_override_settings(self): - with override_settings(AWS_LOCATION='foo1'): - storage = s3boto3.S3Boto3Storage() - self.assertEqual(storage.location, 'foo1') - with override_settings(AWS_LOCATION='foo2'): - storage = s3boto3.S3Boto3Storage() - self.assertEqual(storage.location, 'foo2') - - def test_override_class_variable(self): - class MyStorage1(s3boto3.S3Boto3Storage): - location = 'foo1' - - storage = MyStorage1() - self.assertEqual(storage.location, 'foo1') - - class MyStorage2(s3boto3.S3Boto3Storage): - location = 'foo2' - - storage = MyStorage2() - self.assertEqual(storage.location, 'foo2') - - def test_override_init_argument(self): - storage = s3boto3.S3Boto3Storage(location='foo1') - self.assertEqual(storage.location, 'foo1') - storage = 
s3boto3.S3Boto3Storage(location='foo2') - self.assertEqual(storage.location, 'foo2')