From ad3ce14eb6db10b179ac1d834f1fef5df3d1445e Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Sun, 10 Jul 2016 23:08:33 -0400 Subject: [PATCH 001/174] Py3.5 (#171) * Enable Python 3.5 builds on Travis * Fix Python 3.5 test failure from unittest.mock api change --- .travis.yml | 9 +++------ tests/test_s3boto.py | 2 +- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 926d3fea2..081ae5ad0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,9 @@ sudo: false language: python +python: + - 3.5 + env: - TOX_ENV=py27-django17 - TOX_ENV=py33-django17 @@ -13,12 +16,6 @@ env: - TOX_ENV=py34-django19 - TOX_ENV=py35-django19 -matrix: - # Python 3.5 not yet available on travis, watch this to see when it is. - allow_failures: - - env: TOX_ENV=py35-django18 - - env: TOX_ENV=py35-django19 - before_install: - pip install codecov diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index 034737c19..f5d4a85c8 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -214,7 +214,7 @@ def test_storage_open_write(self): file._multipart.upload_part_from_file.assert_called_with( _file, 1, headers=self.storage.headers, ) - file._multipart.complete_upload.assert_called_once() + file._multipart.complete_upload.assert_called_once_with() def test_storage_exists_bucket(self): self.storage._connection.get_bucket.side_effect = S3ResponseError(404, 'No bucket') From 18626fee6ba568a7550b7fddc0d54fab873e8b70 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Sun, 10 Jul 2016 23:12:29 -0400 Subject: [PATCH 002/174] Update CHANGELOG for #171 [ci skip] --- CHANGELOG.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a3ef4c311..47e14727c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,8 +5,10 @@ django-storages change log ****************** * Fix ``MANIFEST.in`` to not ship ``.pyc`` files. (`#145`_) thanks @fladi +* Enable CI testing of Python3.5 and fix test failure from api change (`#171`_) thanks @tnir .. _#145: https://github.com/jschneier/django-storages/pull/145 +.. _#171: https://github.com/jschneier/django-storages/pull/171 1.4.1 (2016-04-07) ****************** From 8b5622134d80462aa69701b7a575c0db6e47c029 Mon Sep 17 00:00:00 2001 From: Martey Dodoo Date: Sun, 10 Jul 2016 23:36:17 -0400 Subject: [PATCH 003/174] Document AWS_QUERYSTRING_* settings. (#164) Add documentation for AWS_QUERYSTRING_AUTH and AWS_QUERYSTRING_EXPIRE. See https://bitbucket.org/david/django-storages/issues/51/please-improve-s3-storage-documentation and https://github.com/jschneier/django-storages/issues/139. --- docs/backends/amazon-S3.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index ccdd3c429..4c29d4d10 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -40,10 +40,26 @@ If you'd like to set headers sent with each file of the storage:: 'Cache-Control': 'max-age=86400', } +``AWS_QUERYSTRING_AUTH`` (optional; default is ``True``) + +Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` removes `query parameter +authentication`_ from generated URLs. This can be useful if your S3 buckets are +public. + +``AWS_QUERYSTRING_EXPIRE`` (optional; default is 3600 seconds) + +The number of seconds that a generated URL with `query parameter +authentication`_ is valid for. 
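+
+For example, a site with world-readable files could turn signing off
+entirely in a (hypothetical) ``settings.py``::
+
+    AWS_QUERYSTRING_AUTH = False   # plain, unsigned URLs
+
+while a site with private files might keep signing but shorten the window::
+
+    AWS_QUERYSTRING_AUTH = True
+    AWS_QUERYSTRING_EXPIRE = 600   # signed URLs valid for ten minutes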
+ + To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py:: STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage' + +.. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html + + Fields ------ From 77fba4ed7e1b2227d04a7a2696a239e559474a35 Mon Sep 17 00:00:00 2001 From: Camilo Nova Date: Sun, 10 Jul 2016 22:37:14 -0500 Subject: [PATCH 004/174] Is not a fork anymore (#149) This was needed when the name was django-storages-redux now is not the case. --- README.rst | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/README.rst b/README.rst index da1fc5349..e2b718d97 100644 --- a/README.rst +++ b/README.rst @@ -68,21 +68,3 @@ Contributing correctly. #. Bug me until I can merge your pull request. Also, don't forget to add yourself to ``AUTHORS``. - -Why Fork? -==================== -The BitBucket repo of the original django-storages has seen no commit applied -since March 2014 (it is currently December 2014) and no PyPi release since -March 2013 despite a wealth of bugfixes that were applied in that year-long -gap. There is plenty of community support for the django-storages project -(especially the S3BotoStorage piece) and I have a personal need for a Python3 -compatible version. - -All of the Python3 compatible forks that currently exist (and there are a few) -are lacking in some way. This can be anything from the fact that they don't -release to PyPi, have no ongoing testing, didn't apply many important bugfixes -that have occurred on the Bitbucket repo since forking or don't support older -versions of Python and Django (vital to finding bugs and keeping a large -community). For this fork I've done the small bit of work necessary to get a -tox + travis ci matrix going for all of the supported Python + Django versions. -In many cases the various forks are lacking in a few of the above ways. From cb1d709cdf19c4fc800f70844670841905ff86e1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Sun, 10 Jul 2016 23:37:50 -0400 Subject: [PATCH 005/174] Remove some vestigial language --- README.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.rst b/README.rst index e2b718d97..6ea632893 100644 --- a/README.rst +++ b/README.rst @@ -38,8 +38,7 @@ This library is compatible with Django >= 1.7. It should also works with 1.6.2+ History ======= This repo began as a fork of the original library under the package name of django-storages-redux and -became the official successor (releasing under django-storages on PyPI) in February of 2016. The initial -reasons for the fork are explained at the bottom of this document. +became the official successor (releasing under django-storages on PyPI) in February of 2016. Found a Bug? Something Unsupported? 
===================================

From 3053816ddd602384702759116fc44bd30ae0600f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Nikolai=20R=C3=B8ed=20Kristiansen?=
Date: Mon, 11 Jul 2016 19:20:29 +0200
Subject: [PATCH 006/174] SFTPStorage: Fix link to paramiko docs (#147)

---
 storages/backends/sftpstorage.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py
index ce4c144e7..f2fd4e561 100644
--- a/storages/backends/sftpstorage.py
+++ b/storages/backends/sftpstorage.py
@@ -16,8 +16,8 @@
 # SFTP_STORAGE_PARAMS (Optional) - A dictionary containing connection
 # parameters to be passed as keyword arguments to
 # paramiko.SSHClient().connect() (do not include hostname here). See
-# http://www.lag.net/paramiko/docs/paramiko.SSHClient-class.html#connect for
-# details
+# http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect
+# for details
 #
 # SFTP_STORAGE_INTERACTIVE (Optional) - A boolean indicating whether to prompt
 # for a password if the connection cannot be made using keys, and there is not

From 5639b4570df97e6ffadab5fbd43ada1684b5305f Mon Sep 17 00:00:00 2001
From: Anthony Monthe
Date: Mon, 1 Aug 2016 12:36:44 +0100
Subject: [PATCH 007/174] Improved SFTP storage (#177)

* Added parameters to SFTP __init__ and fix File.close()

* Added tests for SFTP

* Moved SFTP documentation from comments to docs
---
 docs/backends/sftp.rst           |  65 +++++++++++++-
 requirements-tests.txt           |   1 +
 storages/backends/sftpstorage.py |  80 +++++-------------
 tests/test_sftp.py               | 146 +++++++++++++++++++++++++++++++
 tox.ini                          |   1 +
 5 files changed, 232 insertions(+), 61 deletions(-)
 create mode 100644 tests/test_sftp.py

diff --git a/docs/backends/sftp.rst b/docs/backends/sftp.rst
index 6f19f9f56..a754097d8 100644
--- a/docs/backends/sftp.rst
+++ b/docs/backends/sftp.rst
@@ -1,5 +1,68 @@
 SFTP
 ====
-Take a look at the top of the backend's file for the documentation.
+Settings
+--------
+``SFTP_STORAGE_HOST``
+
+The hostname where you want the files to be saved.
+
+``SFTP_STORAGE_ROOT``
+
+The root directory on the remote host into which files should be placed.
+Should work the same way that ``STATIC_ROOT`` works for local files. Must
+include a trailing slash.
+
+``SFTP_STORAGE_PARAMS`` (Optional)
+
+A dictionary containing connection parameters to be passed as keyword
+arguments to ``paramiko.SSHClient().connect()`` (do not include hostname here).
+See `paramiko SSHClient.connect() documentation`_ for details.
+
+.. _`paramiko SSHClient.connect() documentation`: http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect
+
+``SFTP_STORAGE_INTERACTIVE`` (Optional)
+
+A boolean indicating whether to prompt for a password if the connection cannot
+be made using keys, and there is not already a password in
+``SFTP_STORAGE_PARAMS``. You can set this to ``True`` to enable interactive
+login when running ``manage.py collectstatic``, for example.
+
+.. warning::
+
+    DO NOT set SFTP_STORAGE_INTERACTIVE to True if you are using this storage
+    for files being uploaded to your site by users, because you'll have no way
+    to enter the password when they submit the form.
+
+``SFTP_STORAGE_FILE_MODE`` (Optional)
+
+A bitmask for setting permissions on newly-created files. See
+`Python os.chmod documentation`_ for acceptable values.
+
+
+``SFTP_STORAGE_DIR_MODE`` (Optional)
+
+A bitmask for setting permissions on newly-created directories. See
+`Python os.chmod documentation`_ for acceptable values.
+
+.. 
note::
+
+    Hint: if you start the mode number with a 0 you can express it in octal
+    just like you would when doing "chmod 775 myfile" from bash.
+
+.. _`Python os.chmod documentation`: http://docs.python.org/library/os.html#os.chmod
+
+``SFTP_STORAGE_UID`` (Optional)
+
+UID of the account that should be set as owner of the files on the remote
+host. You may have to be root to set this.
+
+``SFTP_STORAGE_GID`` (Optional)
+
+GID of the group that should be set on the files on the remote host. You have
+to be a member of the group to set this.
+
+``SFTP_KNOWN_HOST_FILE`` (Optional)
+
+Absolute path of the known hosts file. If it isn't set, ``"~/.ssh/known_hosts"`` will be used.
diff --git a/requirements-tests.txt b/requirements-tests.txt
index 307b874a3..fba9c555b 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -3,3 +3,4 @@ pytest-cov==2.2.1
 boto>=2.32.0
 dropbox>=3.24
 mock
+paramiko
diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py
index f2fd4e561..f219b5950 100644
--- a/storages/backends/sftpstorage.py
+++ b/storages/backends/sftpstorage.py
@@ -4,49 +4,6 @@
 # License: MIT
 #
 # Modeled on the FTP storage by Rafal Jonca
-#
-# Settings:
-#
-# SFTP_STORAGE_HOST - The hostname where you want the files to be saved.
-#
-# SFTP_STORAGE_ROOT - The root directory on the remote host into which files
-# should be placed. Should work the same way that STATIC_ROOT works for local
-# files. Must include a trailing slash.
-#
-# SFTP_STORAGE_PARAMS (Optional) - A dictionary containing connection
-# parameters to be passed as keyword arguments to
-# paramiko.SSHClient().connect() (do not include hostname here). See
-# http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect
-# for details
-#
-# SFTP_STORAGE_INTERACTIVE (Optional) - A boolean indicating whether to prompt
-# for a password if the connection cannot be made using keys, and there is not
-# already a password in SFTP_STORAGE_PARAMS. You can set this to True to
-# enable interactive login when running 'manage.py collectstatic', for example.
-#
-# DO NOT set SFTP_STORAGE_INTERACTIVE to True if you are using this storage
-# for files being uploaded to your site by users, because you'll have no way
-# to enter the password when they submit the form..
-#
-# SFTP_STORAGE_FILE_MODE (Optional) - A bitmask for setting permissions on
-# newly-created files. See http://docs.python.org/library/os.html#os.chmod for
-# acceptable values.
-#
-# SFTP_STORAGE_DIR_MODE (Optional) - A bitmask for setting permissions on
-# newly-created directories. See
-# http://docs.python.org/library/os.html#os.chmod for acceptable values.
-#
-# Hint: if you start the mode number with a 0 you can express it in octal
-# just like you would when doing "chmod 775 myfile" from bash.
-#
-# SFTP_STORAGE_UID (Optional) - uid of the account that should be set as owner
-# of the files on the remote host. You have to be root to set this.
-#
-# SFTP_STORAGE_GID (Optional) - gid of the group that should be set on the
-# files on the remote host. You have to be a member of the group to set this. 
-# SFTP_KNOWN_HOST_FILE (Optional) - absolute path of know host file, if it isn't
-# set "~/.ssh/known_hosts" will be used
-
 
 import getpass
 import os
@@ -59,29 +16,32 @@
 from django.core.files.base import File
 
 from storages.compat import urlparse, BytesIO, Storage
+from storages.utils import setting
 
 
 class SFTPStorage(Storage):
 
-    def __init__(self):
-        self._host = settings.SFTP_STORAGE_HOST
+    def __init__(self, host, params=None, interactive=None, file_mode=None,
+                 dir_mode=None, uid=None, gid=None, known_host_file=None,
+                 root_path=None, base_url=None):
+        self._host = host or setting('SFTP_STORAGE_HOST')
 
-        # if present, settings.SFTP_STORAGE_PARAMS should be a dict with params
-        # matching the keyword arguments to paramiko.SSHClient().connect(). So
-        # you can put username/password there. Or you can omit all that if
-        # you're using keys.
-        self._params = getattr(settings, 'SFTP_STORAGE_PARAMS', {})
-        self._interactive = getattr(settings, 'SFTP_STORAGE_INTERACTIVE',
-                                    False)
-        self._file_mode = getattr(settings, 'SFTP_STORAGE_FILE_MODE', None)
-        self._dir_mode = getattr(settings, 'SFTP_STORAGE_DIR_MODE', None)
+        self._params = params or setting('SFTP_STORAGE_PARAMS', {})
+        self._interactive = setting('SFTP_STORAGE_INTERACTIVE', False) \
+            if interactive is None else interactive
+        self._file_mode = setting('SFTP_STORAGE_FILE_MODE') \
+            if file_mode is None else file_mode
+        self._dir_mode = setting('SFTP_STORAGE_DIR_MODE') if \
+            dir_mode is None else dir_mode
 
-        self._uid = getattr(settings, 'SFTP_STORAGE_UID', None)
-        self._gid = getattr(settings, 'SFTP_STORAGE_GID', None)
-        self._known_host_file = getattr(settings, 'SFTP_KNOWN_HOST_FILE', None)
+        self._uid = setting('SFTP_STORAGE_UID') if uid is None else uid
+        self._gid = setting('SFTP_STORAGE_GID') if gid is None else gid
+        self._known_host_file = setting('SFTP_KNOWN_HOST_FILE') \
+            if known_host_file is None else known_host_file
 
-        self._root_path = settings.SFTP_STORAGE_ROOT
-        self._base_url = settings.MEDIA_URL
+        self._root_path = setting('SFTP_STORAGE_ROOT', '') \
+            if root_path is None else root_path
+        self._base_url = setting('MEDIA_URL') if base_url is None else base_url
 
         # for now it's all posix paths. Maybe someday we'll support figuring
         # out if the remote host is windows. 
@@ -263,5 +223,5 @@ def write(self, content): def close(self): if self._is_dirty: - self._storage._save(self._name, self.file.getvalue()) + self._storage._save(self._name, self) self.file.close() diff --git a/tests/test_sftp.py b/tests/test_sftp.py new file mode 100644 index 000000000..e31ef445e --- /dev/null +++ b/tests/test_sftp.py @@ -0,0 +1,146 @@ +import stat +from datetime import datetime +try: + from unittest.mock import patch, MagicMock +except ImportError: # Python 3.2 and below + from mock import patch, MagicMock +from django.test import TestCase +from django.core.files.base import File +from django.utils.six import BytesIO +from storages.backends import sftpstorage + + +class SFTPStorageTest(TestCase): + def setUp(self): + self.storage = sftpstorage.SFTPStorage('foo') + + def test_init(self): + pass + + @patch('paramiko.SSHClient') + def test_connect(self, mock_ssh): + self.storage._connect() + self.assertEqual('foo', mock_ssh.return_value.connect.call_args[0][0]) + + def test_open(self): + file_ = self.storage._open('foo') + self.assertIsInstance(file_, sftpstorage.SFTPStorageFile) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp') + def test_read(self, mock_sftp): + file_ = self.storage._read('foo') + self.assertTrue(mock_sftp.open.called) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp') + def test_chown(self, mock_sftp): + self.storage._chown('foo', 1, 1) + self.assertEqual(mock_sftp.chown.call_args[0], ('foo', 1, 1)) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp') + def test_mkdir(self, mock_sftp): + self.storage._mkdir('foo') + self.assertEqual(mock_sftp.mkdir.call_args[0], ('foo',)) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{ + 'stat.side_effect': (IOError(), True) + }) + def test_mkdir_parent(self, mock_sftp): + self.storage._mkdir('bar/foo') + self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ('bar',)) + self.assertEqual(mock_sftp.mkdir.call_args_list[1][0], ('bar/foo',)) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp') + def test_save(self, mock_sftp): + self.storage._save('foo', File(BytesIO(b'foo'), 'foo')) + self.assertTrue(mock_sftp.open.return_value.write.called) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{ + 'stat.side_effect': (IOError(), True) + }) + def test_save_in_subdir(self, mock_sftp): + self.storage._save('bar/foo', File(BytesIO(b'foo'), 'foo')) + self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ('bar',)) + self.assertTrue(mock_sftp.open.return_value.write.called) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp') + def test_delete(self, mock_sftp): + self.storage.delete('foo') + self.assertEqual(mock_sftp.remove.call_args_list[0][0], ('foo',)) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp') + def test_exists(self, mock_sftp): + self.assertTrue(self.storage.exists('foo')) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{ + 'stat.side_effect': IOError() + }) + def test_not_exists(self, mock_sftp): + self.assertFalse(self.storage.exists('foo')) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{ + 'listdir_attr.return_value': + [MagicMock(filename='foo', st_mode=stat.S_IFDIR), + MagicMock(filename='bar', st_mode=None)]}) + def test_listdir(self, mock_sftp): + dirs, files = self.storage.listdir('/') + self.assertTrue(dirs) + self.assertTrue(files) + + @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{ + 'stat.return_value.st_size': 42, + }) + def test_size(self, mock_sftp): + 
self.assertEqual(self.storage.size('foo'), 42)
+
+    @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
+        'stat.return_value.st_atime': 1469674684.000000,
+    })
+    def test_accessed_time(self, mock_sftp):
+        self.assertEqual(self.storage.accessed_time('foo'),
+                         datetime(2016, 7, 27, 21, 58, 4))
+
+    @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
+        'stat.return_value.st_mtime': 1469674684.000000,
+    })
+    def test_modified_time(self, mock_sftp):
+        self.assertEqual(self.storage.modified_time('foo'),
+                         datetime(2016, 7, 27, 21, 58, 4))
+
+    def test_url(self):
+        self.assertEqual(self.storage.url('foo'), '/media/foo')
+        # Test custom
+        self.storage._base_url = 'http://bar.pt/'
+        self.assertEqual(self.storage.url('foo'), 'http://bar.pt/foo')
+        # Test error
+        with self.assertRaises(ValueError):
+            self.storage._base_url = None
+            self.storage.url('foo')
+
+
+class SFTPStorageFileTest(TestCase):
+    def setUp(self):
+        self.storage = sftpstorage.SFTPStorage('foo')
+        self.file = sftpstorage.SFTPStorageFile('bar', self.storage, 'wb')
+
+    @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
+        'stat.return_value.st_size': 42,
+    })
+    def test_size(self, mock_sftp):
+        self.assertEqual(self.file.size, 42)
+
+    @patch('storages.backends.sftpstorage.SFTPStorage.sftp', **{
+        'open.return_value.read.return_value': b'foo',
+    })
+    def test_read(self, mock_sftp):
+        self.assertEqual(self.file.read(), b'foo')
+        self.assertTrue(mock_sftp.open.called)
+
+    def test_write(self):
+        self.file.write(b'foo')
+        self.assertEqual(self.file.file.read(), b'foo')
+
+    @patch('storages.backends.sftpstorage.SFTPStorage.sftp')
+    def test_close(self, mock_sftp):
+        self.file.write(b'foo')
+        self.file.close()
+        self.assertTrue(mock_sftp.open.return_value.write.called)
diff --git a/tox.ini b/tox.ini
index 9cadef9a4..2f0464084 100644
--- a/tox.ini
+++ b/tox.ini
@@ -18,3 +18,4 @@ deps =
     boto>=2.32.0
     pytest-cov==2.2.1
     dropbox>=3.24
+    paramiko

From 29367fda967cf1300eb6083245d4f484366eca76 Mon Sep 17 00:00:00 2001
From: Anthony Monthe
Date: Mon, 1 Aug 2016 12:39:25 +0100
Subject: [PATCH 008/174] Improved DropBox storage (#174)

* Improved DropBox storage

* Added DropBox doc
---
 docs/backends/dropbox.rst    | 17 ++++++++++
 storages/backends/dropbox.py | 55 +++++++++++++++++++------------
 tests/test_dropbox.py        | 63 +++++++++++++++++++++++++-----------
 3 files changed, 96 insertions(+), 39 deletions(-)
 create mode 100644 docs/backends/dropbox.rst

diff --git a/docs/backends/dropbox.rst b/docs/backends/dropbox.rst
new file mode 100644
index 000000000..e884f10cf
--- /dev/null
+++ b/docs/backends/dropbox.rst
@@ -0,0 +1,17 @@
+DropBox
+=======
+
+Settings
+--------
+
+
+``DROPBOX_OAUTH2_TOKEN``
+
+Your DropBox token. If you don't have one yet, follow this `guide step`_.
+
+``DROPBOX_ROOT_PATH``
+
+Allows you to jail your storage to a defined directory.
+
+
+.. 
_`guide step`: https://www.dropbox.com/developers/documentation/python#tutorial diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index dc1958757..fdeb9a70b 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -6,15 +6,19 @@ # # Add below to settings.py: # DROPBOX_OAUTH2_TOKEN = 'YourOauthToken' +# DROPBOX_ROOT_PATH = '/dir/' from __future__ import absolute_import from datetime import datetime +from tempfile import SpooledTemporaryFile +from shutil import copyfileobj from django.core.files.base import File from django.core.exceptions import ImproperlyConfigured +from django.utils._os import safe_join -from storages.compat import BytesIO, Storage +from storages.compat import Storage from storages.utils import setting from dropbox.client import DropboxClient @@ -28,39 +32,52 @@ class DropBoxStorageException(Exception): class DropBoxFile(File): - def __init__(self, name, storage, mode='rb'): + def __init__(self, name, storage): self.name = name self._storage = storage - def read(self, num_bytes=None): - return self._storage._read(self.name, num_bytes=num_bytes) - - def write(self, content): - self._storage._save(self.name, content) + @property + def file(self): + if not hasattr(self, '_file'): + response = self._storage.client.get_file(self.name) + self._file = SpooledTemporaryFile() + copyfileobj(response, self._file) + self._file.seek(0) + return self._file class DropBoxStorage(Storage): """DropBox Storage class for Django pluggable storage system.""" - def __init__(self, oauth2_access_token=setting('DROPBOX_OAUTH2_TOKEN')): + def __init__(self, oauth2_access_token=None, root_path=None): + oauth2_access_token = oauth2_access_token or setting('DROPBOX_OAUTH2_TOKEN') + self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/') if oauth2_access_token is None: raise ImproperlyConfigured("You must configure a token auth at" "'settings.DROPBOX_OAUTH2_TOKEN'.") self.client = DropboxClient(oauth2_access_token) + def _full_path(self, name): + if name == '/': + name = '' + return safe_join(self.root_path, name) + def delete(self, name): - self.client.file_delete(name) + self.client.file_delete(self._full_path(name)) def exists(self, name): try: - return bool(self.client.metadata(name)) + return bool(self.client.metadata(self._full_path(name))) except ErrorResponse: return False def listdir(self, path): directories, files = [], [] - metadata = self.client.metadata(path) + full_path = self._full_path(path) + metadata = self.client.metadata(full_path) for entry in metadata['contents']: + entry['path'] = entry['path'].replace(full_path, '', 1) + entry['path'] = entry['path'].replace('/', '', 1) if entry['is_dir']: directories.append(entry['path']) else: @@ -68,31 +85,27 @@ def listdir(self, path): return directories, files def size(self, name): - metadata = self.client.metadata(name) + metadata = self.client.metadata(self._full_path(name)) return metadata['bytes'] def modified_time(self, name): - metadata = self.client.metadata(name) + metadata = self.client.metadata(self._full_path(name)) mod_time = datetime.strptime(metadata['modified'], DATE_FORMAT) return mod_time def accessed_time(self, name): - metadata = self.client.metadata(name) + metadata = self.client.metadata(self._full_path(name)) acc_time = datetime.strptime(metadata['client_mtime'], DATE_FORMAT) return acc_time def url(self, name): - media = self.client.media(name) + media = self.client.media(self._full_path(name)) return media['url'] def _open(self, name, mode='rb'): - remote_file = 
DropBoxFile(name, self) + remote_file = DropBoxFile(self._full_path(name), self) return remote_file def _save(self, name, content): - self.client.put_file(name, content) + self.client.put_file(self._full_path(name), content) return name - - def _read(self, name, num_bytes=None): - data = self.client.get_file(name) - return data.read(num_bytes) diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py index 41be28a13..a29d10468 100644 --- a/tests/test_dropbox.py +++ b/tests/test_dropbox.py @@ -7,6 +7,8 @@ from django.test import TestCase from django.core.files.base import File, ContentFile +from django.core.exceptions import ImproperlyConfigured, \ + SuspiciousFileOperation from storages.backends import dropbox @@ -64,7 +66,11 @@ class DropBoxTest(TestCase): re.compile(r'.*')) @mock.patch('dropbox.client.DropboxOAuth2Session') def setUp(self, *args): - self.storage = dropbox.DropBoxStorage('') + self.storage = dropbox.DropBoxStorage('foo') + + def test_no_access_token(self, *args): + with self.assertRaises(ImproperlyConfigured): + dropbox.DropBoxStorage(None) @mock.patch('dropbox.client.DropboxClient.file_delete', return_value=FILE_FIXTURE) @@ -89,8 +95,8 @@ def test_listdir(self, *args): dirs, files = self.storage.listdir('/') self.assertGreater(len(dirs), 0) self.assertGreater(len(files), 0) - self.assertEqual(dirs[0], '/bar') - self.assertEqual(files[0], '/foo.txt') + self.assertEqual(dirs[0], 'bar') + self.assertEqual(files[0], 'foo.txt') @mock.patch('dropbox.client.DropboxClient.metadata', return_value=FILE_FIXTURE) @@ -119,34 +125,55 @@ def test_open(self, *args): def test_save(self, *args): self.storage._save('foo', b'bar') - @mock.patch('dropbox.client.DropboxClient.get_file', - return_value=ContentFile('bar')) - def test_read(self, *args): - content = self.storage._read('foo') - self.assertEqual(content, 'bar') - @mock.patch('dropbox.client.DropboxClient.media', return_value=FILE_MEDIA_FIXTURE) def test_url(self, *args): url = self.storage.url('foo') self.assertEqual(url, FILE_MEDIA_FIXTURE['url']) + def test_formats(self, *args): + self.storage = dropbox.DropBoxStorage('foo') + files = self.storage._full_path('') + self.assertEqual(files, self.storage._full_path('/')) + self.assertEqual(files, self.storage._full_path('.')) + self.assertEqual(files, self.storage._full_path('..')) + self.assertEqual(files, self.storage._full_path('../..')) + class DropBoxFileTest(TestCase): @mock.patch('dropbox.client._OAUTH2_ACCESS_TOKEN_PATTERN', re.compile(r'.*')) @mock.patch('dropbox.client.DropboxOAuth2Session') def setUp(self, *args): - self.storage = dropbox.DropBoxStorage('') + self.storage = dropbox.DropBoxStorage('foo') self.file = dropbox.DropBoxFile('/foo.txt', self.storage) - @mock.patch('dropbox.client.DropboxClient.put_file', - return_value='foo') - def test_write(self, *args): - self.storage._save('foo', b'bar') - @mock.patch('dropbox.client.DropboxClient.get_file', - return_value=ContentFile('bar')) + return_value=ContentFile(b'bar')) def test_read(self, *args): - content = self.storage._read('foo') - self.assertEqual(content, 'bar') + file = self.storage._open(b'foo') + self.assertEqual(file.read(), b'bar') + + +@mock.patch('dropbox.client._OAUTH2_ACCESS_TOKEN_PATTERN', + re.compile(r'.*')) +@mock.patch('dropbox.client.DropboxOAuth2Session') +@mock.patch('dropbox.client.DropboxClient.metadata', + return_value={'contents': []}) +class DropBoxRootPathTest(TestCase): + def test_jailed(self, *args): + self.storage = dropbox.DropBoxStorage('foo', '/bar') + dirs, files = 
self.storage.listdir('/')
+        self.assertFalse(dirs)
+        self.assertFalse(files)
+
+    def test_suspicious(self, *args):
+        self.storage = dropbox.DropBoxStorage('foo', '/bar')
+        with self.assertRaises((SuspiciousFileOperation, ValueError)):
+            self.storage._full_path('..')
+
+    def test_formats(self, *args):
+        self.storage = dropbox.DropBoxStorage('foo', '/bar')
+        files = self.storage._full_path('')
+        self.assertEqual(files, self.storage._full_path('/'))
+        self.assertEqual(files, self.storage._full_path('.'))

From 64af28d3741c2152205dbc0126d55215fed1c643 Mon Sep 17 00:00:00 2001
From: Anthony Monthe
Date: Mon, 1 Aug 2016 17:49:50 +0100
Subject: [PATCH 009/174] Added FTP readlines and fix storage init argument (#175)

* Added FTP readlines and fix storage init argument

* Added FTP storage tests

* Update doc
---
 docs/backends/ftp.rst    |  12 +++
 storages/backends/ftp.py |  22 +++-
 tests/test_ftp.py        | 225 +++++++++++++++++++++++++++++++++++++++
 3 files changed, 255 insertions(+), 4 deletions(-)
 create mode 100644 tests/test_ftp.py

diff --git a/docs/backends/ftp.rst b/docs/backends/ftp.rst
index 6b894f0db..c8d8e6a8b 100644
--- a/docs/backends/ftp.rst
+++ b/docs/backends/ftp.rst
@@ -5,3 +5,15 @@ FTP
 This implementation was done preliminary for upload files in admin to remote
 FTP location and read them back on site by HTTP. It was tested mostly in this
 configuration, so read/write using FTPStorageFile class may break.
+Settings
+--------
+
+``LOCATION``
+
+URL of the server that holds the files.
+Example: ``'ftp://<user>:<pass>@<host>:<port>'``
+
+``BASE_URL``
+
+URL that serves the files stored at this location. Defaults to the value of
+your ``MEDIA_URL`` setting.
diff --git a/storages/backends/ftp.py b/storages/backends/ftp.py
index fd7ae4bb0..296612f7c 100644
--- a/storages/backends/ftp.py
+++ b/storages/backends/ftp.py
@@ -23,6 +23,7 @@
 from django.core.exceptions import ImproperlyConfigured
 
 from storages.compat import urlparse, BytesIO, Storage
+from storages.utils import setting
 
 
 class FTPStorageException(Exception):
@@ -32,8 +33,14 @@ class FTPStorageException(Exception):
 class FTPStorage(Storage):
     """FTP Storage class for Django pluggable storage system."""
 
-    def __init__(self, location=settings.FTP_STORAGE_LOCATION,
-                 base_url=settings.MEDIA_URL):
+    def __init__(self, location=None, base_url=None):
+        location = location or setting('FTP_STORAGE_LOCATION')
+        if location is None:
+            raise ImproperlyConfigured("You must set a location at "
+                                       "instantiation or at "
+                                       "'settings.FTP_STORAGE_LOCATION'.")
+        self.location = location
+        base_url = base_url or settings.MEDIA_URL
         self._config = self._decode_location(location)
         self._base_url = base_url
         self._connection = None
@@ -134,6 +141,7 @@ def _read(self, name):
             self._connection.retrbinary('RETR ' + os.path.basename(name),
                                         memory_file.write)
             self._connection.cwd(pwd)
+            memory_file.seek(0)
             return memory_file
         except ftplib.all_errors:
             raise FTPStorageException('Error reading file %s' % name)
@@ -184,7 +192,7 @@ def listdir(self, path):
         self._start_connection()
         try:
             dirs, files = self._get_dir_details(path)
-            return dirs.keys(), files.keys()
+            return list(dirs.keys()), list(files.keys())
         except FTPStorageException:
             raise
 
@@ -248,12 +256,18 @@ def size(self):
             self._size = self._storage.size(self.name)
         return self._size
 
-    def read(self, num_bytes=None):
+    def readlines(self):
         if not self._is_read:
             self._storage._start_connection()
             self.file = self._storage._read(self.name)
             self._is_read = True
+        return self.file.readlines()
 
+    def read(self, num_bytes=None):
+        if not self._is_read:
+            
self._storage._start_connection() + self.file = self._storage._read(self.name) + self._is_read = True return self.file.read(num_bytes) def write(self, content): diff --git a/tests/test_ftp.py b/tests/test_ftp.py new file mode 100644 index 000000000..3b539e703 --- /dev/null +++ b/tests/test_ftp.py @@ -0,0 +1,225 @@ +try: + from unittest.mock import patch +except ImportError: + from mock import patch +from datetime import datetime + +from django.test import TestCase +from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import File +from django.utils.six import BytesIO + +from storages.backends import ftp + +USER = 'foo' +PASSWORD = 'b@r' +HOST = 'localhost' +PORT = 2121 +URL = "ftp://{user}:{passwd}@{host}:{port}/".format(user=USER, passwd=PASSWORD, + host=HOST, port=PORT) + +LIST_FIXTURE = """drwxr-xr-x 2 ftp nogroup 4096 Jul 27 09:46 dir +-rw-r--r-- 1 ftp nogroup 1024 Jul 27 09:45 fi +-rw-r--r-- 1 ftp nogroup 2048 Jul 27 09:50 fi2""" + + +def list_retrlines(cmd, func): + for line in LIST_FIXTURE.splitlines(): + func(line) + + +class FTPTest(TestCase): + def setUp(self): + self.storage = ftp.FTPStorage(location=URL) + + def test_init_no_location(self): + with self.assertRaises(ImproperlyConfigured): + ftp.FTPStorage() + + @patch('storages.backends.ftp.setting', return_value=URL) + def test_init_location_from_setting(self, mock_setting): + storage = ftp.FTPStorage() + self.assertTrue(mock_setting.called) + self.assertEqual(storage.location, URL) + + def test_decode_location(self): + config = self.storage._decode_location(URL) + wanted_config = {'passwd': 'b@r', 'host': 'localhost', 'user': 'foo', 'active': False, 'path': '/', 'port': 2121} + self.assertEqual(config, wanted_config) + # Test active FTP + config = self.storage._decode_location('a'+URL) + wanted_config = {'passwd': 'b@r', 'host': 'localhost', 'user': 'foo', 'active': True, 'path': '/', 'port': 2121} + self.assertEqual(config, wanted_config) + + def test_decode_location_error(self): + with self.assertRaises(ImproperlyConfigured): + self.storage._decode_location('foo') + with self.assertRaises(ImproperlyConfigured): + self.storage._decode_location('http://foo.pt') + # TODO: Cannot not provide a port + # with self.assertRaises(ImproperlyConfigured): + # self.storage._decode_location('ftp://') + + @patch('ftplib.FTP') + def test_start_connection(self, mock_ftp): + self.storage._start_connection() + self.assertIsNotNone(self.storage._connection) + # Start active + storage = ftp.FTPStorage(location='a'+URL) + storage._start_connection() + + @patch('ftplib.FTP', **{'return_value.pwd.side_effect': IOError()}) + def test_start_connection_timeout(self, mock_ftp): + self.storage._start_connection() + self.assertIsNotNone(self.storage._connection) + + @patch('ftplib.FTP', **{'return_value.connect.side_effect': IOError()}) + def test_start_connection_error(self, mock_ftp): + with self.assertRaises(ftp.FTPStorageException): + self.storage._start_connection() + + @patch('ftplib.FTP', **{'return_value.quit.return_value': None}) + def test_disconnect(self, mock_ftp_quit): + self.storage._start_connection() + self.storage.disconnect() + self.assertIsNone(self.storage._connection) + + @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo',}) + def test_mkremdirs(self, mock_ftp): + self.storage._start_connection() + self.storage._mkremdirs('foo/bar') + + @patch('ftplib.FTP', **{ + 'return_value.pwd.return_value': 'foo', + 'return_value.storbinary.return_value': None + }) + def test_put_file(self, mock_ftp): + 
self.storage._start_connection()
+        self.storage._put_file('foo', File(BytesIO(b'foo'), 'foo'))
+
+    @patch('ftplib.FTP', **{
+        'return_value.pwd.return_value': 'foo',
+        'return_value.storbinary.side_effect': IOError()
+    })
+    def test_put_file_error(self, mock_ftp):
+        self.storage._start_connection()
+        with self.assertRaises(ftp.FTPStorageException):
+            self.storage._put_file('foo', File(BytesIO(b'foo'), 'foo'))
+
+    def test_open(self):
+        remote_file = self.storage._open('foo')
+        self.assertIsInstance(remote_file, ftp.FTPStorageFile)
+
+    @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo'})
+    def test_read(self, mock_ftp):
+        self.storage._start_connection()
+        self.storage._read('foo')
+
+    @patch('ftplib.FTP', **{'return_value.pwd.side_effect': IOError()})
+    def test_read_error(self, mock_ftp):
+        self.storage._start_connection()
+        with self.assertRaises(ftp.FTPStorageException):
+            self.storage._read('foo')
+
+    @patch('ftplib.FTP', **{
+        'return_value.pwd.return_value': 'foo',
+        'return_value.storbinary.return_value': None
+    })
+    def test_save(self, mock_ftp):
+        self.storage._save('foo', File(BytesIO(b'foo'), 'foo'))
+
+    @patch('ftplib.FTP', **{'return_value.sendcmd.return_value': '213 20160727094506'})
+    def test_modified_time(self, mock_ftp):
+        self.storage._start_connection()
+        modif_date = self.storage.modified_time('foo')
+        self.assertEqual(modif_date, datetime(2016, 7, 27, 9, 45, 6))
+
+    @patch('ftplib.FTP', **{'return_value.sendcmd.return_value': '500'})
+    def test_modified_time_error(self, mock_ftp):
+        self.storage._start_connection()
+        with self.assertRaises(ftp.FTPStorageException):
+            self.storage.modified_time('foo')
+
+    @patch('ftplib.FTP', **{'return_value.retrlines': list_retrlines})
+    def test_listdir(self, mock_retrlines):
+        dirs, files = self.storage.listdir('/')
+        self.assertEqual(len(dirs), 1)
+        self.assertEqual(dirs, ['dir'])
+        self.assertEqual(len(files), 2)
+        self.assertEqual(sorted(files), sorted(['fi', 'fi2']))
+
+    @patch('ftplib.FTP', **{'return_value.retrlines.side_effect': IOError()})
+    def test_listdir_error(self, mock_ftp):
+        with self.assertRaises(ftp.FTPStorageException):
+            self.storage.listdir('/')
+
+    @patch('ftplib.FTP', **{'return_value.nlst.return_value': ['foo', 'foo2']})
+    def test_exists(self, mock_ftp):
+        self.assertTrue(self.storage.exists('foo'))
+        self.assertFalse(self.storage.exists('bar'))
+
+    @patch('ftplib.FTP', **{'return_value.nlst.side_effect': IOError()})
+    def test_exists_error(self, mock_ftp):
+        with self.assertRaises(ftp.FTPStorageException):
+            self.storage.exists('foo')
+
+    @patch('ftplib.FTP', **{
+        'return_value.delete.return_value': None,
+        'return_value.nlst.return_value': ['foo', 'foo2']
+    })
+    def test_delete(self, mock_ftp):
+        self.storage.delete('foo')
+        self.assertTrue(mock_ftp.return_value.delete.called)
+
+    @patch('ftplib.FTP', **{'return_value.retrlines': list_retrlines})
+    def test_size(self, mock_ftp):
+        self.assertEqual(1024, self.storage.size('fi'))
+        self.assertEqual(2048, self.storage.size('fi2'))
+        self.assertEqual(0, self.storage.size('bar'))
+
+    @patch('ftplib.FTP', **{'return_value.retrlines.side_effect': IOError()})
+    def test_size_error(self, mock_ftp):
+        self.assertEqual(0, self.storage.size('foo'))
+
+    def test_url(self):
+        with self.assertRaises(ValueError):
+            self.storage._base_url = None
+            self.storage.url('foo')
+        self.storage = ftp.FTPStorage(location=URL, base_url='http://foo.bar/')
+        self.assertEqual('http://foo.bar/foo', self.storage.url('foo'))
+
+
+class FTPStorageFileTest(TestCase):
+    def 
setUp(self):
+        self.storage = ftp.FTPStorage(location=URL)
+
+    @patch('ftplib.FTP', **{'return_value.retrlines': list_retrlines})
+    def test_size(self, mock_ftp):
+        file_ = ftp.FTPStorageFile('fi', self.storage, 'wb')
+        self.assertEqual(file_.size, 1024)
+
+    @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo'})
+    @patch('storages.backends.ftp.FTPStorage._read', return_value=BytesIO(b'foo'))
+    def test_readlines(self, mock_ftp, mock_storage):
+        file_ = ftp.FTPStorageFile('fi', self.storage, 'wb')
+        self.assertEqual([b'foo'], file_.readlines())
+
+    @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo'})
+    @patch('storages.backends.ftp.FTPStorage._read', return_value=BytesIO(b'foo'))
+    def test_read(self, mock_ftp, mock_storage):
+        file_ = ftp.FTPStorageFile('fi', self.storage, 'wb')
+        self.assertEqual(b'foo', file_.read())
+
+    def test_write(self):
+        file_ = ftp.FTPStorageFile('fi', self.storage, 'wb')
+        file_.write(b'foo')
+        file_.seek(0)
+        self.assertEqual(file_.file.read(), b'foo')
+
+    @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo'})
+    @patch('storages.backends.ftp.FTPStorage._read', return_value=BytesIO(b'foo'))
+    def test_close(self, mock_ftp, mock_storage):
+        file_ = ftp.FTPStorageFile('fi', self.storage, 'wb')
+        file_.is_dirty = True
+        file_.read()
+        file_.close()

From 7e5e58ff98b00dc93676201999ecf42b648ef667 Mon Sep 17 00:00:00 2001
From: Tomek Falkiewicz
Date: Mon, 1 Aug 2016 18:51:07 +0200
Subject: [PATCH 010/174] Added info about AWS_S3_ENCRYPTION. (#172)

---
 docs/backends/amazon-S3.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst
index 4c29d4d10..ee6d39fb8 100644
--- a/docs/backends/amazon-S3.rst
+++ b/docs/backends/amazon-S3.rst
@@ -59,6 +59,9 @@ To allow ``django-admin.py`` collectstatic to automatically put your static file
 
 .. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
 
+``AWS_S3_ENCRYPTION`` (optional; default is False)
+
+Enables server-side encryption of files at rest by setting the ``encrypt_key`` parameter to ``True``. More info is available here: http://boto.cloudhackers.com/en/latest/ref/s3.html
 
 Fields
 ------

From c8e902b4c9e95ada0aa12488e6fd26cfad47fe85 Mon Sep 17 00:00:00 2001
From: Anthony Monthe
Date: Tue, 2 Aug 2016 12:49:16 +0100
Subject: [PATCH 011/174] Added strict mode to utils.setting() (#176)

---
 storages/utils.py   | 19 ++++++++++++++++---
 tests/test_utils.py | 16 ++++++++++++++++
 2 files changed, 32 insertions(+), 3 deletions(-)
 create mode 100644 tests/test_utils.py

diff --git a/storages/utils.py b/storages/utils.py
index 810e0c596..2f501b194 100644
--- a/storages/utils.py
+++ b/storages/utils.py
@@ -1,9 +1,22 @@
 from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
 
 
-def setting(name, default=None):
+def setting(name, default=None, strict=False):
     """
-    Helper function to get a Django setting by name or (optionally) return
-    a default (or else ``None``).
+    Helper function to get a Django setting by name. If the setting does not
+    exist, it can return a default or raise an error in strict mode. 
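+
+    Example (illustrative only)::
+
+        MEDIA_URL = setting('MEDIA_URL', default='/media/')
+        TOKEN = setting('DROPBOX_OAUTH2_TOKEN', strict=True)  # raises if unset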
+
+    :param name: Name of setting
+    :type name: str
+    :param default: Value to return if the setting is not found
+    :param strict: Whether to raise an error if the setting is not found
+    :type strict: bool
+    :returns: The setting's value
+    :raises: django.core.exceptions.ImproperlyConfigured if the setting is
+             not found and strict mode is enabled
     """
+    if strict and not hasattr(settings, name):
+        msg = "You must provide settings.%s" % name
+        raise ImproperlyConfigured(msg)
     return getattr(settings, name, default)
diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 000000000..2e804b25e
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,16 @@
+from django.test import TestCase
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from storages import utils
+
+
+class SettingTest(TestCase):
+    def test_get_setting(self):
+        value = utils.setting('SECRET_KEY')
+        self.assertEqual(settings.SECRET_KEY, value)
+
+    def test_setting_unfound(self):
+        self.assertIsNone(utils.setting('FOO'))
+        self.assertEqual(utils.setting('FOO', 'bar'), 'bar')
+        with self.assertRaises(ImproperlyConfigured):
+            utils.setting('FOO', strict=True)

From 1fc2ef1e5e91a9a9ca3043e826d4256ec77aa672 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Tue, 2 Aug 2016 14:45:20 +0200
Subject: [PATCH 012/174] S3boto3 (#179)

* New S3 Boto3 backend.

* Based on existing s3boto module
* Replace Boto2 headers settings with parameters
* Does not support proxies, alternate host/port

* S3Boto3 Updates from feedback and catching up to s3boto commits

* Update s3boto3.py

Non-existent file raises IOError in _open for backwards compatibility with s3boto

Don't let the ClientError bubble up
---
 AUTHORS                      |   1 +
 requirements-tests.txt       |   1 +
 storages/backends/s3boto3.py | 566 +++++++++++++++++++++++++++++++++++
 tests/test_s3boto3.py        | 313 +++++++++++++++++++
 tox.ini                      |   1 +
 5 files changed, 882 insertions(+)
 create mode 100644 storages/backends/s3boto3.py
 create mode 100644 tests/test_s3boto3.py

diff --git a/AUTHORS b/AUTHORS
index 8b34c4b33..0991c0ee7 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -24,6 +24,7 @@ By order of apparition, thanks:
  * Josh Schneier (Fork maintainer, Bugfixes, Py3K)
  * Anthony Monthe (Dropbox)
  * EunPyo (Andrew) Hong (Azure)
+ * Michael Barrientos (S3 with Boto3)
 
 Extra thanks to Marty for adding this in Django, you can buy his very
 interesting book (Pro Django). 
diff --git a/requirements-tests.txt b/requirements-tests.txt index fba9c555b..e9bf66d71 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,6 +1,7 @@ Django>=1.7 pytest-cov==2.2.1 boto>=2.32.0 +boto3>=1.2.3 dropbox>=3.24 mock paramiko diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py new file mode 100644 index 000000000..afebc9a1c --- /dev/null +++ b/storages/backends/s3boto3.py @@ -0,0 +1,566 @@ +import os +import posixpath +import mimetypes +from gzip import GzipFile +from tempfile import SpooledTemporaryFile + +from django.core.files.base import File +from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation +from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes + +try: + from boto3 import resource + from boto3 import __version__ as boto3_version + from botocore.client import Config + from botocore.exceptions import ClientError +except ImportError: + raise ImproperlyConfigured("Could not load Boto3's S3 bindings.\n" + "See https://github.com/boto/boto3") + +from storages.utils import setting +from storages.compat import urlparse, BytesIO, deconstructible, Storage + +boto3_version_info = tuple([int(i) for i in boto3_version.split('-')[0].split('.')]) + +if boto3_version_info[:2] < (1, 2): + raise ImproperlyConfigured("The installed Boto3 library must be 1.2.0 or " + "higher.\nSee https://github.com/boto/boto3") + + +def safe_join(base, *paths): + """ + A version of django.utils._os.safe_join for S3 paths. + + Joins one or more path components to the base path component + intelligently. Returns a normalized version of the final path. + + The final path must be located inside of the base path component + (otherwise a ValueError is raised). + + Paths outside the base path indicate a possible security + sensitive operation. + """ + base_path = force_text(base) + base_path = base_path.rstrip('/') + paths = [force_text(p) for p in paths] + + final_path = base_path + for path in paths: + final_path = urlparse.urljoin(final_path.rstrip('/') + "/", path) + + # Ensure final_path starts with base_path and that the next character after + # the final path is '/' (or nothing, in which case final_path must be + # equal to base_path). + base_path_len = len(base_path) + if (not final_path.startswith(base_path) or + final_path[base_path_len:base_path_len + 1] not in ('', '/')): + raise ValueError('the joined path is located outside of the base path' + ' component') + + return final_path.lstrip('/') + + +@deconstructible +class S3Boto3StorageFile(File): + + """ + The default file object used by the S3Boto3Storage backend. + + This file implements file streaming using boto's multipart + uploading functionality. The file can be opened in read or + write mode. + + This class extends Django's File class. However, the contained + data is only the data contained in the current buffer. So you + should not access the contained file object directly. You should + access the data via this class. + + Warning: This file *must* be closed using the close() method in + order to properly write the file to S3. Be sure to close the file + in your application. + """ + # TODO: Read/Write (rw) mode may be a bit undefined at the moment. Needs testing. + # TODO: When Django drops support for Python 2.5, rewrite to use the + # BufferedIO streams in the Python 2.6 io module. 
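+    # 5242880 bytes is 5 MB, the smallest part size S3's multipart API
+    # accepts for every part except the last one.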
+ buffer_size = setting('AWS_S3_FILE_BUFFER_SIZE', 5242880) + + def __init__(self, name, mode, storage, buffer_size=None): + self._storage = storage + self.name = name[len(self._storage.location):].lstrip('/') + self._mode = mode + self.obj = storage.bucket.Object(storage._encode_name(name)) + if 'w' not in mode: + # Force early RAII-style exception if object does not exist + self.obj.load() + self._is_dirty = False + self._file = None + self._multipart = None + # 5 MB is the minimum part size (if there is more than one part). + # Amazon allows up to 10,000 parts. The default supports uploads + # up to roughly 50 GB. Increase the part size to accommodate + # for files larger than this. + if buffer_size is not None: + self.buffer_size = buffer_size + self._write_counter = 0 + + @property + def size(self): + return self.obj.content_length + + def _get_file(self): + if self._file is None: + self._file = SpooledTemporaryFile( + max_size=self._storage.max_memory_size, + suffix=".S3Boto3StorageFile", + dir=setting("FILE_UPLOAD_TEMP_DIR", None) + ) + if 'r' in self._mode: + self._is_dirty = False + self._file.write(self.obj.get()['Body'].read()) + self._file.seek(0) + if self._storage.gzip and self.obj.content_encoding == 'gzip': + self._file = GzipFile(mode=self._mode, fileobj=self._file) + return self._file + + def _set_file(self, value): + self._file = value + + file = property(_get_file, _set_file) + + def read(self, *args, **kwargs): + if 'r' not in self._mode: + raise AttributeError("File was not opened in read mode.") + return super(S3Boto3StorageFile, self).read(*args, **kwargs) + + def write(self, content): + if 'w' not in self._mode: + raise AttributeError("File was not opened in write mode.") + self._is_dirty = True + if self._multipart is None: + parameters = self._storage.object_parameters.copy() + parameters['ACL'] = self._storage.default_acl + parameters['ContentType'] = (mimetypes.guess_type(self.obj.key)[0] or + self._storage.default_content_type) + if self._storage.reduced_redundancy: + parameters['StorageClass'] = 'REDUCED_REDUNDANCY' + if self._storage.encryption: + parameters['ServerSideEncryption'] = 'AES256' + self._multipart = self.obj.initiate_multipart_upload(**parameters) + if self.buffer_size <= self._buffer_file_size: + self._flush_write_buffer() + return super(S3Boto3StorageFile, self).write(force_bytes(content)) + + @property + def _buffer_file_size(self): + pos = self.file.tell() + self.file.seek(0, os.SEEK_END) + length = self.file.tell() + self.file.seek(pos) + return length + + def _flush_write_buffer(self): + """ + Flushes the write buffer. + """ + if self._buffer_file_size: + self._write_counter += 1 + self.file.seek(0) + part = self._multipart.Part(self._write_counter) + part.upload(Body=self.file.read()) + + def close(self): + if self._is_dirty: + self._flush_write_buffer() + # TODO: Possibly cache the part ids as they're being uploaded + # instead of requesting parts from server. For now, emulating + # s3boto's behavior. 
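+            # CompleteMultipartUpload expects the ETag and number of each
+            # uploaded part, ordered by ascending part number.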
+ parts = [{'ETag': part.e_tag, 'PartNumber': part.part_number} + for part in self._multipart.parts.all()] + self._multipart.complete( + MultipartUpload={'Parts': parts}) + else: + if self._multipart is not None: + self._multipart.abort() + if self._file is not None: + self._file.close() + self._file = None + + +@deconstructible +class S3Boto3Storage(Storage): + """ + Amazon Simple Storage Service using Boto3 + + This storage backend supports opening files in read or write + mode and supports streaming(buffering) data in chunks to S3 + when writing. + """ + connection_class = staticmethod(resource) + connection_service_name = 's3' + default_content_type = 'application/octet-stream' + connection_response_error = ClientError + file_class = S3Boto3StorageFile + # If config provided in init, signature_version and addressing_style settings/args are ignored. + config = None + + # used for looking up the access and secret key from env vars + access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'] + secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY'] + + access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID')) + secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY')) + file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True) + object_parameters = setting('AWS_S3_OBJECT_PARAMETERS', {}) + bucket_name = setting('AWS_STORAGE_BUCKET_NAME') + auto_create_bucket = setting('AWS_AUTO_CREATE_BUCKET', False) + default_acl = setting('AWS_DEFAULT_ACL', 'public-read') + bucket_acl = setting('AWS_BUCKET_ACL', default_acl) + querystring_auth = setting('AWS_QUERYSTRING_AUTH', True) + querystring_expire = setting('AWS_QUERYSTRING_EXPIRE', 3600) + signature_version = setting('AWS_S3_SIGNATURE_VERSION') + reduced_redundancy = setting('AWS_REDUCED_REDUNDANCY', False) + location = setting('AWS_LOCATION', '') + encryption = setting('AWS_S3_ENCRYPTION', False) + custom_domain = setting('AWS_S3_CUSTOM_DOMAIN') + addressing_style = setting('AWS_S3_ADDRESSING_STYLE') + secure_urls = setting('AWS_S3_SECURE_URLS', True) + file_name_charset = setting('AWS_S3_FILE_NAME_CHARSET', 'utf-8') + gzip = setting('AWS_IS_GZIPPED', False) + preload_metadata = setting('AWS_PRELOAD_METADATA', False) + gzip_content_types = setting('GZIP_CONTENT_TYPES', ( + 'text/css', + 'text/javascript', + 'application/javascript', + 'application/x-javascript', + 'image/svg+xml', + )) + url_protocol = setting('AWS_S3_URL_PROTOCOL', 'http:') + endpoint_url = setting('AWS_S3_ENDPOINT_URL', None) + region_name = setting('AWS_S3_REGION_NAME', None) + use_ssl = setting('AWS_S3_USE_SSL', True) + + # The max amount of memory a returned file can take up before being + # rolled over into a temporary file on disk. Default is 0: Do not roll over. + max_memory_size = setting('AWS_S3_MAX_MEMORY_SIZE', 0) + + def __init__(self, acl=None, bucket=None, **settings): + # check if some of the settings we've provided as class attributes + # need to be overwritten with values passed in here + for name, value in settings.items(): + if hasattr(self, name): + setattr(self, name, value) + + # For backward-compatibility of old differing parameter names + if acl is not None: + self.default_acl = acl + if bucket is not None: + self.bucket_name = bucket + + self.location = (self.location or '').lstrip('/') + # Backward-compatibility: given the anteriority of the SECURE_URL setting + # we fall back to https if specified in order to avoid the construction + # of unsecure urls. 
+ if self.secure_urls: + self.url_protocol = 'https:' + + self._entries = {} + self._bucket = None + self._connection = None + + if not self.access_key and not self.secret_key: + self.access_key, self.secret_key = self._get_access_keys() + + if not self.config: + self.config = Config(s3={'addressing_style': self.addressing_style}, + signature_version=self.signature_version) + + @property + def connection(self): + # TODO: Support host, port like in s3boto + # Note that proxies are handled by environment variables that the underlying + # urllib/requests libraries read. See https://github.com/boto/boto3/issues/338 + # and http://docs.python-requests.org/en/latest/user/advanced/#proxies + if self._connection is None: + self._connection = self.connection_class( + self.connection_service_name, + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + region_name=self.region_name, + use_ssl=self.use_ssl, + endpoint_url=self.endpoint_url, + config=self.config + ) + return self._connection + + @property + def bucket(self): + """ + Get the current bucket. If there is no current bucket object + create it. + """ + if self._bucket is None: + self._bucket = self._get_or_create_bucket(self.bucket_name) + return self._bucket + + @property + def entries(self): + """ + Get the locally cached files for the bucket. + """ + if self.preload_metadata and not self._entries: + self._entries = dict((self._decode_name(entry.key), entry) + for entry in self.bucket.objects.filter(prefix=self.location)) + return self._entries + + def _get_access_keys(self): + """ + Gets the access keys to use when accessing S3. If none + are provided to the class in the constructor or in the + settings then get them from the environment variables. + """ + def lookup_env(names): + for name in names: + value = os.environ.get(name) + if value: + return value + access_key = self.access_key or lookup_env(self.access_key_names) + secret_key = self.secret_key or lookup_env(self.secret_key_names) + return access_key, secret_key + + def _get_or_create_bucket(self, name): + """ + Retrieves a bucket if it exists, otherwise creates it. + """ + bucket = self.connection.Bucket(name) + if self.auto_create_bucket: + try: + # Directly call head_bucket instead of bucket.load() because head_bucket() + # fails on wrong region, while bucket.load() does not. + bucket.meta.client.head_bucket(Bucket=name) + except self.connection_response_error as err: + if err.response['ResponseMetadata']['HTTPStatusCode'] == 301: + raise ImproperlyConfigured("Bucket %s exists, but in a different " + "region than we are connecting to. Set " + "the region to connect to by setting " + "AWS_S3_REGION_NAME to the correct region." % name) + # Notes: When using the us-east-1 Standard endpoint, you can create + # buckets in other regions. The same is not true when hitting region specific + # endpoints. However, when you create the bucket not in the same region, the + # connection will fail all future requests to the Bucket after the creation + # (301 Moved Permanently). + # + # For simplicity, we enforce in S3Boto3Storage that any auto-created + # bucket must match the region that the connection is for. + # + # Also note that Amazon specifically disallows "us-east-1" when passing bucket + # region names; LocationConstraint *must* be blank to create in US Standard. 
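+                # Collect the create() arguments up front so the ACL and,
+                # outside us-east-1, the LocationConstraint are passed together.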
+                bucket_params = {'ACL': self.bucket_acl}
+                region_name = self.connection.meta.client.meta.region_name
+                if region_name != 'us-east-1':
+                    bucket_params['CreateBucketConfiguration'] = {
+                        'LocationConstraint': region_name}
+                # Pass the params built above so the LocationConstraint, when
+                # set, is actually sent with the create call.
+                bucket.create(**bucket_params)
+            else:
+                raise ImproperlyConfigured("Bucket %s does not exist. Buckets "
+                                           "can be automatically created by "
+                                           "setting AWS_AUTO_CREATE_BUCKET to "
+                                           "``True``." % name)
+        return bucket
+
+    def _clean_name(self, name):
+        """
+        Cleans the name so that Windows-style paths work
+        """
+        # Normalize Windows-style paths
+        clean_name = posixpath.normpath(name).replace('\\', '/')
+
+        # os.path.normpath() can strip trailing slashes so we implement
+        # a workaround here.
+        if name.endswith('/') and not clean_name.endswith('/'):
+            # Add a trailing slash as it was stripped.
+            return clean_name + '/'
+        else:
+            return clean_name
+
+    def _normalize_name(self, name):
+        """
+        Normalizes the name so that paths like /path/to/ignored/../something.txt
+        work. We check to make sure that the path pointed to is not outside
+        the directory specified by the LOCATION setting.
+        """
+        try:
+            return safe_join(self.location, name)
+        except ValueError:
+            raise SuspiciousOperation("Attempted access to '%s' denied." %
+                                      name)
+
+    def _encode_name(self, name):
+        return smart_str(name, encoding=self.file_name_charset)
+
+    def _decode_name(self, name):
+        return force_text(name, encoding=self.file_name_charset)
+
+    def _compress_content(self, content):
+        """Gzip a given string content."""
+        zbuf = BytesIO()
+        zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
+        try:
+            zfile.write(force_bytes(content.read()))
+        finally:
+            zfile.close()
+        zbuf.seek(0)
+        # Boto 2 returned the InMemoryUploadedFile with the file pointer replaced,
+        # but Boto 3 seems to have issues with that. There is no need for fp.name
+        # in Boto3, so just return the BytesIO directly.
+        return zbuf
+
+    def _open(self, name, mode='rb'):
+        name = self._normalize_name(self._clean_name(name))
+        try:
+            f = self.file_class(name, mode, self)
+        except self.connection_response_error as err:
+            if err.response['ResponseMetadata']['HTTPStatusCode'] == 404:
+                raise IOError('File does not exist: %s' % name)
+            raise  # Let it bubble up if it was some other error
+        return f
+
+    def _save(self, name, content):
+        cleaned_name = self._clean_name(name)
+        name = self._normalize_name(cleaned_name)
+        parameters = self.object_parameters.copy()
+        content_type = getattr(content, 'content_type',
+                               mimetypes.guess_type(name)[0] or self.default_content_type)
+
+        # setting the content_type in the key object is not enough.
+        parameters.update({'ContentType': content_type})
+
+        if self.gzip and content_type in self.gzip_content_types:
+            content = self._compress_content(content)
+            parameters.update({'ContentEncoding': 'gzip'})
+
+        encoded_name = self._encode_name(name)
+        obj = self.bucket.Object(encoded_name)
+        if self.preload_metadata:
+            self._entries[encoded_name] = obj
+
+        self._save_content(obj, content, parameters=parameters)
+        # Note: In boto3, after a put, last_modified is automatically reloaded
+        # the next time it is accessed; no need to specifically reload it.
+        return cleaned_name
+
+    def _save_content(self, obj, content, parameters):
+        # Only pass the optional put() arguments when they differ from their defaults.
+        put_parameters = parameters.copy() if parameters else {}
+        if self.encryption:
+            put_parameters['ServerSideEncryption'] = 'AES256'
+        if self.reduced_redundancy:
+            put_parameters['StorageClass'] = 'REDUCED_REDUNDANCY'
+        if self.default_acl:
+            put_parameters['ACL'] = self.default_acl
+        content.seek(0, os.SEEK_SET)
+        obj.put(Body=content, **put_parameters)
+
+    def delete(self, name):
+        name = self._normalize_name(self._clean_name(name))
+        self.bucket.Object(self._encode_name(name)).delete()
+
+    def exists(self, name):
+        if not name:
+            try:
+                self.bucket
+                return True
+            except ImproperlyConfigured:
+                return False
+        name = self._normalize_name(self._clean_name(name))
+        if self.entries:
+            return name in self.entries
+        obj = self.bucket.Object(self._encode_name(name))
+        try:
+            obj.load()
+            return True
+        except self.connection_response_error:
+            return False
+
+    def listdir(self, name):
+        name = self._normalize_name(self._clean_name(name))
+        # For bucket.objects.filter and the logic below, name needs to end in /.
+        # The root path "" is left as an empty string, however.
+        if name and not name.endswith('/'):
+            name += '/'
+
+        files = []
+        dirs = set()
+        base_parts = name.split("/")[:-1]
+        for item in self.bucket.objects.filter(Prefix=self._encode_name(name)):
+            parts = item.key.split("/")
+            parts = parts[len(base_parts):]
+            if len(parts) == 1:
+                # File
+                files.append(parts[0])
+            elif len(parts) > 1:
+                # Directory
+                dirs.add(parts[0])
+        return list(dirs), files
+
+    def size(self, name):
+        name = self._normalize_name(self._clean_name(name))
+        if self.entries:
+            entry = self.entries.get(name)
+            if entry:
+                return entry.content_length
+            return 0
+        return self.bucket.Object(self._encode_name(name)).content_length
+
+    def modified_time(self, name):
+        name = self._normalize_name(self._clean_name(name))
+        entry = self.entries.get(name)
+        # only call self.bucket.Object() if the key is not found
+        # in the preloaded metadata.
+        if entry is None:
+            entry = self.bucket.Object(self._encode_name(name))
+        return entry.last_modified
+
+    def _strip_signing_parameters(self, url):
+        # Boto3 does not currently support generating URLs that are unsigned. Instead we
+        # take the signed URLs and strip any querystring params related to signing and expiration.
+        # Note that this may end up with URLs that are still invalid, especially if params are
+        # passed in that only work with signed URLs, e.g. response header params.
+        # The code attempts to strip all query parameters that match names of known parameters
+        # from v2 and v4 signatures, regardless of the actual signature version used.
+        split_url = urlparse.urlsplit(url)
+        qs = urlparse.parse_qsl(split_url.query, keep_blank_values=True)
+        blacklist = set(['x-amz-algorithm', 'x-amz-credential', 'x-amz-date',
+                         'x-amz-expires', 'x-amz-signedheaders', 'x-amz-signature',
+                         'x-amz-security-token', 'awsaccesskeyid', 'expires', 'signature'])
+        filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist)
+        # Note: Parameters that did not have a value in the original query string will
+        # have an '=' sign appended, e.g. ?foo&bar becomes ?foo=&bar=
+        joined_qs = ('='.join(keyval) for keyval in filtered_qs)
+        split_url = split_url._replace(query="&".join(joined_qs))
+        return split_url.geturl()
+
+    def url(self, name, parameters=None, expire=None):
+        # Preserve the trailing slash after normalizing the path.
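+        # Example with hypothetical values: a presigned URL such as
+        #     https://example-bucket.s3.amazonaws.com/f.txt?X-Amz-Signature=abc&X-Amz-Expires=3600
+        # is returned unchanged when querystring_auth is True; otherwise
+        # _strip_signing_parameters() reduces it to
+        #     https://example-bucket.s3.amazonaws.com/f.txt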
+ # TODO: Handle force_http=not self.secure_urls like in s3boto + name = self._normalize_name(self._clean_name(name)) + if self.custom_domain: + return "%s//%s/%s" % (self.url_protocol, + self.custom_domain, filepath_to_uri(name)) + if expire is None: + expire = self.querystring_expire + + params = parameters.copy() if parameters else {} + params['Bucket'] = self.bucket.name + params['Key'] = self._encode_name(name) + url = self.bucket.meta.client.generate_presigned_url('get_object', Params=params, + ExpiresIn=expire) + if self.querystring_auth: + return url + return self._strip_signing_parameters(url) + + def get_available_name(self, name, max_length=None): + """Overwrite existing file with the same name.""" + if self.file_overwrite: + name = self._clean_name(name) + return name + return super(S3Boto3Storage, self).get_available_name(name, max_length) diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py new file mode 100644 index 000000000..9a7611f42 --- /dev/null +++ b/tests/test_s3boto3.py @@ -0,0 +1,313 @@ +import gzip +import unittest +try: + from unittest import mock +except ImportError: # Python 3.2 and below + import mock + +from django.test import TestCase +from django.core.files.base import ContentFile +import django + +from botocore.exceptions import ClientError + +from storages.compat import urlparse +from storages.backends import s3boto3 + +__all__ = ( + 'SafeJoinTest', + 'S3Boto3StorageTests', +) + + +class S3Boto3TestCase(TestCase): + @mock.patch('storages.backends.s3boto3.resource') + def setUp(self, resource): + self.storage = s3boto3.S3Boto3Storage() + self.storage._connection = mock.MagicMock() + + +class SafeJoinTest(TestCase): + def test_normal(self): + path = s3boto3.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") + self.assertEquals(path, "path/to/somewhere/other/path/to/somewhere") + + def test_with_dot(self): + path = s3boto3.safe_join("", "path/./somewhere/../other", "..", + ".", "to/./somewhere") + self.assertEquals(path, "path/to/somewhere") + + def test_base_url(self): + path = s3boto3.safe_join("base_url", "path/to/somewhere") + self.assertEquals(path, "base_url/path/to/somewhere") + + def test_base_url_with_slash(self): + path = s3boto3.safe_join("base_url/", "path/to/somewhere") + self.assertEquals(path, "base_url/path/to/somewhere") + + def test_suspicious_operation(self): + self.assertRaises(ValueError, + s3boto3.safe_join, "base", "../../../../../../../etc/passwd") + + def test_trailing_slash(self): + """ + Test safe_join with paths that end with a trailing slash. + """ + path = s3boto3.safe_join("base_url/", "path/to/somewhere/") + self.assertEquals(path, "base_url/path/to/somewhere/") + + def test_trailing_slash_multi(self): + """ + Test safe_join with multiple paths that end with a trailing slash. 
+        """
+        path = s3boto3.safe_join("base_url/", "path/to/", "somewhere/")
+        self.assertEquals(path, "base_url/path/to/somewhere/")
+
+
+class S3Boto3StorageTests(S3Boto3TestCase):
+
+    def test_clean_name(self):
+        """
+        Test the base case of _clean_name
+        """
+        path = self.storage._clean_name("path/to/somewhere")
+        self.assertEqual(path, "path/to/somewhere")
+
+    def test_clean_name_normalize(self):
+        """
+        Test the normalization of _clean_name
+        """
+        path = self.storage._clean_name("path/to/../somewhere")
+        self.assertEqual(path, "path/somewhere")
+
+    def test_clean_name_trailing_slash(self):
+        """
+        Test _clean_name when the path has a trailing slash
+        """
+        path = self.storage._clean_name("path/to/somewhere/")
+        self.assertEqual(path, "path/to/somewhere/")
+
+    def test_clean_name_windows(self):
+        """
+        Test _clean_name when the path uses Windows-style separators
+        """
+        path = self.storage._clean_name("path\\to\\somewhere")
+        self.assertEqual(path, "path/to/somewhere")
+
+    def test_storage_url_slashes(self):
+        """
+        Test URL generation.
+        """
+        self.storage.custom_domain = 'example.com'
+
+        # We expect no leading slashes in the path,
+        # and trailing slashes should be preserved.
+        self.assertEqual(self.storage.url(''), 'https://example.com/')
+        self.assertEqual(self.storage.url('path'), 'https://example.com/path')
+        self.assertEqual(self.storage.url('path/'), 'https://example.com/path/')
+        self.assertEqual(self.storage.url('path/1'), 'https://example.com/path/1')
+        self.assertEqual(self.storage.url('path/1/'), 'https://example.com/path/1/')
+
+    def test_storage_save(self):
+        """
+        Test saving a file
+        """
+        name = 'test_storage_save.txt'
+        content = ContentFile('new content')
+        self.storage.save(name, content)
+        self.storage.bucket.Object.assert_called_once_with(name)
+
+        obj = self.storage.bucket.Object.return_value
+        obj.put.assert_called_with(
+            Body=content,
+            ContentType='text/plain',
+            ACL=self.storage.default_acl,
+        )
+
+    def test_storage_save_gzip(self):
+        """
+        Test saving a file with gzip enabled.
+        """
+        self.storage.gzip = True
+        name = 'test_storage_save.css'
+        content = ContentFile("I should be gzip'd")
+        self.storage.save(name, content)
+        obj = self.storage.bucket.Object.return_value
+        obj.put.assert_called_with(
+            Body=mock.ANY,
+            ContentType='text/css',
+            ContentEncoding='gzip',
+            ACL=self.storage.default_acl
+        )
+        body = obj.put.call_args[1]['Body']
+        zfile = gzip.GzipFile(mode='rb', fileobj=body)
+        self.assertEquals(zfile.read(), b"I should be gzip'd")
+
+    def test_compress_content_len(self):
+        """
+        Test that file returned by _compress_content() is readable.
+ """ + self.storage.gzip = True + content = ContentFile("I should be gzip'd") + content = self.storage._compress_content(content) + self.assertTrue(len(content.read()) > 0) + + def test_storage_open_write(self): + """ + Test opening a file in write mode + """ + name = 'test_open_for_writing.txt' + content = 'new content' + + # Set the encryption flag used for multipart uploads + self.storage.encryption = True + self.storage.reduced_redundancy = True + self.storage.default_acl = 'public-read' + + file = self.storage.open(name, 'w') + self.storage.bucket.Object.assert_called_with(name) + obj = self.storage.bucket.Object.return_value + # Set the name of the mock object + obj.key = name + + file.write(content) + obj.initiate_multipart_upload.assert_called_with( + ACL='public-read', + ContentType='text/plain', + ServerSideEncryption='AES256', + StorageClass='REDUCED_REDUNDANCY' + ) + + # Save the internal file before closing + multipart = obj.initiate_multipart_upload.return_value + multipart.parts.all.return_value = [mock.MagicMock(e_tag='123', part_number=1)] + file.close() + multipart.Part.assert_called_with(1) + part = multipart.Part.return_value + part.upload.assert_called_with(Body=content.encode('utf-8')) + multipart.complete.assert_called_once_with( + MultipartUpload={'Parts': [{'ETag': '123', 'PartNumber': 1}]}) + + # def test_storage_exists_bucket(self): + # bucket = self.storage._connection.Bucket.return_value + # bucket.meta.client.head_bucket.side_effect = ClientError( + # {'Error': {'Code': 123, 'Message': 'Fake'}}, 'load') + # self.assertFalse(self.storage.exists('')) + # + # self.storage.bucket.meta.client.head_bucket.side_effect = None + # self.assertTrue(self.storage.exists('')) + + def test_storage_exists(self): + obj = self.storage.bucket.Object.return_value + self.assertTrue(self.storage.exists("file.txt")) + self.storage.bucket.Object.assert_called_with("file.txt") + obj.load.assert_called_with() + + def test_storage_exists_false(self): + obj = self.storage.bucket.Object.return_value + obj.load.side_effect = ClientError({'Error': {'Code': 123, 'Message': 'Fake'}}, 'load') + self.assertFalse(self.storage.exists("file.txt")) + self.storage.bucket.Object.assert_called_with("file.txt") + obj.load.assert_called_with() + + def test_storage_delete(self): + self.storage.delete("path/to/file.txt") + self.storage.bucket.Object.assert_called_with('path/to/file.txt') + self.storage.bucket.Object.return_value.delete.assert_called_with() + + def test_storage_listdir_base(self): + file_names = ["some/path/1.txt", "2.txt", "other/path/3.txt", "4.txt"] + + result = [] + for p in file_names: + obj = mock.MagicMock() + obj.key = p + result.append(obj) + self.storage.bucket.objects.filter.return_value = iter(result) + + dirs, files = self.storage.listdir("") + self.storage.bucket.objects.filter.assert_called_with(Prefix="") + + self.assertEqual(len(dirs), 2) + for directory in ["some", "other"]: + self.assertTrue(directory in dirs, + """ "%s" not in directory list "%s".""" % ( + directory, dirs)) + + self.assertEqual(len(files), 2) + for filename in ["2.txt", "4.txt"]: + self.assertTrue(filename in files, + """ "%s" not in file list "%s".""" % ( + filename, files)) + + def test_storage_listdir_subdir(self): + file_names = ["some/path/1.txt", "some/2.txt"] + + result = [] + for p in file_names: + obj = mock.MagicMock() + obj.key = p + result.append(obj) + self.storage.bucket.objects.filter.return_value = iter(result) + + dirs, files = self.storage.listdir("some/") + 
self.storage.bucket.objects.filter.assert_called_with(Prefix="some/") + + self.assertEqual(len(dirs), 1) + self.assertTrue('path' in dirs, + """ "path" not in directory list "%s".""" % (dirs,)) + + self.assertEqual(len(files), 1) + self.assertTrue('2.txt' in files, + """ "2.txt" not in files list "%s".""" % (files,)) + + def test_storage_size(self): + obj = self.storage.bucket.Object.return_value + obj.content_length = 4098 + + name = 'file.txt' + self.assertEqual(self.storage.size(name), obj.content_length) + + def test_storage_url(self): + name = 'test_storage_size.txt' + url = 'http://aws.amazon.com/%s' % name + self.storage.bucket.meta.client.generate_presigned_url.return_value = url + self.storage.bucket.name = 'bucket' + self.assertEquals(self.storage.url(name), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=self.storage.querystring_expire + ) + + custom_expire = 123 + + self.assertEquals(self.storage.url(name, expire=custom_expire), url) + self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( + 'get_object', + Params={'Bucket': self.storage.bucket.name, 'Key': name}, + ExpiresIn=custom_expire + ) + + def test_generated_url_is_encoded(self): + self.storage.custom_domain = "mock.cloudfront.net" + filename = "whacky & filename.mp4" + url = self.storage.url(filename) + parsed_url = urlparse.urlparse(url) + self.assertEqual(parsed_url.path, + "/whacky%20%26%20filename.mp4") + self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) + + @unittest.skipIf(django.VERSION >= (1, 8), + 'Only test backward compat of max_length for versions before 1.8') + def test_max_length_compat_okay(self): + self.storage.file_overwrite = False + self.storage.exists = lambda name: False + self.storage.get_available_name('gogogo', max_length=255) + + def test_strip_signing_parameters(self): + expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' + self.assertEquals(self.storage._strip_signing_parameters( + '%s?X-Amz-Date=12345678&X-Amz-Signature=Signature' % expected), expected) + self.assertEquals(self.storage._strip_signing_parameters( + '%s?expires=12345678&signature=Signature' % expected), expected) diff --git a/tox.ini b/tox.ini index 2f0464084..4eac06886 100644 --- a/tox.ini +++ b/tox.ini @@ -17,5 +17,6 @@ deps = py27: mock==1.0.1 boto>=2.32.0 pytest-cov==2.2.1 + boto3>=1.2.3 dropbox>=3.24 paramiko From fe2f9bebeed9dfcc465a48acaccf917b6f8ab7ae Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 2 Aug 2016 14:48:32 +0200 Subject: [PATCH 013/174] Update CHANGELOG for recent merges --- CHANGELOG.rst | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 47e14727c..37ce31b50 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,14 +1,24 @@ django-storages change log ========================== -1.4.2 (XXXX-XX-XX) +1.5.0 (2016-08-02) ****************** +* Add new backend ``S3Boto3Storage`` (`#179`_) MASSIVE THANKS @mbarrien +* Add a `strict` option to `utils.setting` (`#176`_) thanks @ZuluPro +* Tests, documentation, fixing ``.close`` for ``SFTPStorage`` (`#177`_) thanks @ZuluPro +* Tests, documentation, add `.readlines` for ``FTPStorage`` (`#175`_) thanks @ZuluPro +* Tests and documentation for ``DropBoxStorage`` (`#174`_) thanks @ZuluPro * Fix ``MANIFEST.in`` to not ship ``.pyc`` files. 
(`#145`_) thanks @fladi * Enable CI testing of Python3.5 and fix test failure from api change (`#171`_) thanks @tnir .. _#145: https://github.com/jschneier/django-storages/pull/145 .. _#171: https://github.com/jschneier/django-storages/pull/171 +.. _#174: https://github.com/jschneier/django-storages/pull/174 +.. _#175: https://github.com/jschneier/django-storages/pull/175 +.. _#177: https://github.com/jschneier/django-storages/pull/177 +.. _#176: https://github.com/jschneier/django-storages/pull/176 +.. _#179: https://github.com/jschneier/django-storages/pull/179 1.4.1 (2016-04-07) ****************** From d4c7c566790eb089340bebe492cf50547e2eb12c Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 2 Aug 2016 14:51:38 +0200 Subject: [PATCH 014/174] Release version 1.5.0 --- storages/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/__init__.py b/storages/__init__.py index 8e3c933cd..77f1c8e63 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.4.1' +__version__ = '1.5.0' From 1c5b209d674a64181121f94c3345dbdc0348b394 Mon Sep 17 00:00:00 2001 From: Nik Nyby Date: Thu, 4 Aug 2016 13:31:50 -0400 Subject: [PATCH 015/174] link to https version of docs (#184) --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 6ea632893..2b16bdecb 100644 --- a/README.rst +++ b/README.rst @@ -52,7 +52,7 @@ Issues are tracked via GitHub issues at the `project issue page Documentation ============= -The original documentation for django-storages is located at http://django-storages.readthedocs.org/. +The original documentation for django-storages is located at https://django-storages.readthedocs.org/. Stay tuned for forthcoming documentation updates. Contributing From 302aa743c621f7d801142ac54c825aabed8c1125 Mon Sep 17 00:00:00 2001 From: Josh Wright Date: Thu, 4 Aug 2016 13:35:23 -0400 Subject: [PATCH 016/174] Fix TZ awareness in S3Boto3Storage (#181) Django expects a naive datetime from modified_time()[1]. Django 1.10 adds get_modified_time()[2], which respects USE_TZ. This commit fixes modified_time(), adds get_modified_time(), and adds relevant tests. [1]: https://docs.djangoproject.com/en/1.10/ref/files/storage/#django.core.files.storage.Storage.modified_time [2]: https://docs.djangoproject.com/en/1.10/ref/files/storage/#django.core.files.storage.Storage.get_modified_time --- storages/backends/s3boto3.py | 17 +++++++++++++++-- tests/settings.py | 3 +++ tests/test_s3boto3.py | 17 +++++++++++++++++ 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index afebc9a1c..e6bf08ceb 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -7,6 +7,7 @@ from django.core.files.base import File from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes +from django.utils.timezone import localtime try: from boto3 import resource @@ -511,14 +512,26 @@ def size(self, name): return 0 return self.bucket.Object(self._encode_name(name)).content_length - def modified_time(self, name): + def get_modified_time(self, name): + """ + Returns an (aware) datetime object containing the last modified time if + USE_TZ is True, otherwise returns a naive datetime in the local timezone. 
+ """ name = self._normalize_name(self._clean_name(name)) entry = self.entries.get(name) # only call self.bucket.Object() if the key is not found # in the preloaded metadata. if entry is None: entry = self.bucket.Object(self._encode_name(name)) - return entry.last_modified + if setting('USE_TZ'): + # boto3 returns TZ aware timestamps + return entry.last_modified + else: + return localtime(entry.last_modified).replace(tzinfo=None) + + def modified_time(self, name): + """Returns a naive datetime object containing the last modified time.""" + return localtime(self.get_modified_time(name)).replace(tzinfo=None) def _strip_signing_parameters(self, url): # Boto3 does not currently support generating URLs that are unsigned. Instead we diff --git a/tests/settings.py b/tests/settings.py index d84433b1d..43047b6cc 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -28,3 +28,6 @@ AWS_IS_GZIPPED = True GS_IS_GZIPPED = True SECRET_KEY = 'hailthesunshine' + +USE_TZ = True +TIME_ZONE = 'America/Chicago' diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 9a7611f42..5e3370a2e 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -4,9 +4,11 @@ from unittest import mock except ImportError: # Python 3.2 and below import mock +from datetime import datetime, timedelta, tzinfo from django.test import TestCase from django.core.files.base import ContentFile +from django.utils.timezone import is_aware, utc import django from botocore.exceptions import ClientError @@ -268,6 +270,21 @@ def test_storage_size(self): name = 'file.txt' self.assertEqual(self.storage.size(name), obj.content_length) + def test_storage_mtime(self): + obj = self.storage.bucket.Object.return_value + obj.last_modified = datetime.now(utc) + + name = 'file.txt' + self.assertFalse( + is_aware(self.storage.modified_time(name)), + 'Naive datetime object expected from modified_time()' + ) + + self.assertTrue( + is_aware(self.storage.get_modified_time(name)), + 'Aware datetime object expected from get_modified_time()' + ) + def test_storage_url(self): name = 'test_storage_size.txt' url = 'http://aws.amazon.com/%s' % name From 13aa8578b248f4dc595925814afe3cad4398785b Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 4 Aug 2016 20:45:51 +0200 Subject: [PATCH 017/174] Fix broken gzip test --- tests/test_s3boto.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index f5d4a85c8..a56598f21 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -153,8 +153,7 @@ def test_storage_save_gzip(self): """ Test saving a file with gzip enabled. """ - if not s3boto.S3BotoStorage.gzip: # Gzip not available. - return + self.storage.gzip = True name = 'test_storage_save.css' content = ContentFile("I should be gzip'd") self.storage.save(name, content) @@ -172,8 +171,6 @@ def test_compress_content_len(self): """ Test that file returned by _compress_content() is readable. """ - if not s3boto.S3BotoStorage.gzip: # Gzip not available. 
- return content = ContentFile("I should be gzip'd") content = self.storage._compress_content(content) self.assertTrue(len(content.read()) > 0) From e4bce772032dd5ce9c80108e55e01f8eb0e18701 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 5 Aug 2016 16:12:21 +0200 Subject: [PATCH 018/174] Update CHANGELOG for #181 --- CHANGELOG.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 37ce31b50..4aad1c29d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,13 @@ django-storages change log ========================== +1.5.1 (XXXX-XX-XX) +****************** + +* Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright + +.. _#181: https://github.com/jschneier/django-storages/pull/181 + 1.5.0 (2016-08-02) ****************** From 8a5f40c85288b66d27b97944ef4bd1aaa3fccdf1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 5 Aug 2016 17:47:22 +0200 Subject: [PATCH 019/174] Django110 (#185) * Drop Django 1.7 support and add Django 1.10 Remove unnecessary compatability wrappers. * Tell Travis to use the correct envs * Update CHANGELOG * Fix OverwriteStorage typo --- .travis.yml | 6 ++-- CHANGELOG.rst | 2 ++ storages/backends/apache_libcloud.py | 6 ++-- storages/backends/azure_storage.py | 2 +- storages/backends/couchdb.py | 13 ++++---- storages/backends/database.py | 49 ++++++++++++++-------------- storages/backends/dropbox.py | 4 +-- storages/backends/ftp.py | 4 ++- storages/backends/gs.py | 2 +- storages/backends/hashpath.py | 6 ++-- storages/backends/image.py | 24 +++++++------- storages/backends/mogile.py | 35 ++++++++++---------- storages/backends/overwrite.py | 2 +- storages/backends/s3boto.py | 5 ++- storages/backends/s3boto3.py | 7 ++-- storages/backends/sftpstorage.py | 4 ++- storages/backends/symlinkorcopy.py | 2 +- storages/compat.py | 28 ---------------- tests/test_s3boto.py | 10 +----- tests/test_s3boto3.py | 13 ++------ tox.ini | 4 +-- 21 files changed, 99 insertions(+), 129 deletions(-) delete mode 100644 storages/compat.py diff --git a/.travis.yml b/.travis.yml index 081ae5ad0..205ed3a13 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,9 +5,6 @@ python: - 3.5 env: - - TOX_ENV=py27-django17 - - TOX_ENV=py33-django17 - - TOX_ENV=py34-django17 - TOX_ENV=py27-django18 - TOX_ENV=py33-django18 - TOX_ENV=py34-django18 @@ -15,6 +12,9 @@ env: - TOX_ENV=py27-django19 - TOX_ENV=py34-django19 - TOX_ENV=py35-django19 + - TOX_ENV=py27-django110 + - TOX_ENV=py34-django110 + - TOX_ENV=py35-django110 before_install: - pip install codecov diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4aad1c29d..b2a1b0b0c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,8 +4,10 @@ django-storages change log 1.5.1 (XXXX-XX-XX) ****************** +* **Drop support for Django 1.7** (`#185`_) * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright +.. _#185: https://github.com/jschneier/django-storages/pull/185 .. 
_#181: https://github.com/jschneier/django-storages/pull/181 1.5.0 (2016-08-02) diff --git a/storages/backends/apache_libcloud.py b/storages/backends/apache_libcloud.py index 4dc4b8b00..a2a5390de 100644 --- a/storages/backends/apache_libcloud.py +++ b/storages/backends/apache_libcloud.py @@ -5,12 +5,12 @@ from django.conf import settings from django.core.files.base import File +from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured -from django.utils.six import string_types +from django.utils.deconstruct import deconstructible +from django.utils.six import string_types, BytesIO from django.utils.six.moves.urllib.parse import urljoin -from storages.compat import BytesIO, deconstructible, Storage - try: from libcloud.storage.providers import get_driver from libcloud.storage.types import ObjectDoesNotExistError, Provider diff --git a/storages/backends/azure_storage.py b/storages/backends/azure_storage.py index e1e4b5651..52328b713 100644 --- a/storages/backends/azure_storage.py +++ b/storages/backends/azure_storage.py @@ -6,7 +6,7 @@ from django.core.files.base import ContentFile from django.core.exceptions import ImproperlyConfigured -from storages.compat import Storage +from django.core.files.storage import Storage try: import azure # noqa diff --git a/storages/backends/couchdb.py b/storages/backends/couchdb.py index 2bcecd8a2..1dcfadee9 100644 --- a/storages/backends/couchdb.py +++ b/storages/backends/couchdb.py @@ -7,9 +7,10 @@ from django.conf import settings from django.core.files import File +from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured - -from storages.compat import urlparse, BytesIO, Storage +from django.utils.six.moves.urllib import parse as urlparse +from django.utils.six import BytesIO try: import couchdb @@ -17,8 +18,8 @@ raise ImproperlyConfigured("Could not load couchdb dependency.\ \nSee http://code.google.com/p/couchdb-python/") -DEFAULT_SERVER= getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984') -STORAGE_OPTIONS= getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {}) +DEFAULT_SERVER = getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984') +STORAGE_OPTIONS = getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {}) class CouchDBStorage(Storage): @@ -26,9 +27,9 @@ class CouchDBStorage(Storage): CouchDBStorage - a Django Storage class for CouchDB. The CouchDBStorage can be configured in settings.py, e.g.:: - + COUCHDB_STORAGE_OPTIONS = { - 'server': "http://example.org", + 'server': "http://example.org", 'database': 'database_name' } diff --git a/storages/backends/database.py b/storages/backends/database.py index e0057ab16..114bdcf11 100644 --- a/storages/backends/database.py +++ b/storages/backends/database.py @@ -3,9 +3,10 @@ from django.conf import settings from django.core.files import File +from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured - -from storages.compat import urlparse, BytesIO, Storage +from django.utils.six import BytesIO +from django.utils.six.moves.urllib import parse as urlparse try: import pyodbc @@ -18,26 +19,26 @@ class DatabaseStorage(Storage): """ - Class DatabaseStorage provides storing files in the database. + Class DatabaseStorage provides storing files in the database. """ def __init__(self, option=settings.DB_FILES): - """Constructor. - + """Constructor. + Constructs object using dictionary either specified in contucotr or -in settings.DB_FILES. - +in settings.DB_FILES. 
+ @param option dictionary with 'db_table', 'fname_column', -'blob_column', 'size_column', 'base_url' keys. - +'blob_column', 'size_column', 'base_url' keys. + option['db_table'] Table to work with. option['fname_column'] Column in the 'db_table' containing filenames (filenames can contain pathes). Values should be the same as where FileField keeps -filenames. +filenames. It is used to map filename to blob_column. In sql it's simply -used in where clause. +used in where clause. option['blob_column'] Blob column (for example 'image' type), created manually in the 'db_table', used to store image. @@ -46,7 +47,7 @@ def __init__(self, option=settings.DB_FILES): method (another way is to open file and get size) option['base_url'] Url prefix used with filenames. Should be mapped to the view, -that returns an image as result. +that returns an image as result. """ if not option or not all([field in option for field in REQUIRED_FIELDS]): @@ -64,18 +65,18 @@ def __init__(self, option=settings.DB_FILES): self.DATABASE_USER = settings.DATABASE_USER self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD self.DATABASE_HOST = settings.DATABASE_HOST - + self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME, self.DATABASE_USER, self.DATABASE_PASSWORD) ) self.cursor = self.connection.cursor() def _open(self, name, mode='rb'): - """Open a file from database. - + """Open a file from database. + @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends pathes with "/". If there is no such file in the db, returs None """ - + assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone() @@ -84,23 +85,23 @@ def _open(self, name, mode='rb'): inMemFile = BytesIO(row[0]) inMemFile.name = name inMemFile.mode = mode - + retFile = File(inMemFile) return retFile def _save(self, name, content): """Save 'content' as file named 'name'. - - @note '\' in path will be converted to '/'. + + @note '\' in path will be converted to '/'. """ - + name = name.replace('\\', '/') binary = pyodbc.Binary(content.read()) size = len(binary) - + #todo: check result and do something (exception?) if failed. if self.exists(name): - self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), + self.cursor.execute("UPDATE %s SET %s = ?, %s = ? 
WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), (binary, size) ) else: self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size) ) @@ -110,7 +111,7 @@ def _save(self, name, content): def exists(self, name): row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone() return row is not None - + def get_available_name(self, name, max_length=None): return name @@ -123,7 +124,7 @@ def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") return urlparse.urljoin(self.base_url, name).replace('\\', '/') - + def size(self, name): row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone() if row is None: diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index fdeb9a70b..040858f89 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -14,11 +14,11 @@ from tempfile import SpooledTemporaryFile from shutil import copyfileobj -from django.core.files.base import File from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import File +from django.core.files.storage import Storage from django.utils._os import safe_join -from storages.compat import Storage from storages.utils import setting from dropbox.client import DropboxClient diff --git a/storages/backends/ftp.py b/storages/backends/ftp.py index 296612f7c..c49f9c45e 100644 --- a/storages/backends/ftp.py +++ b/storages/backends/ftp.py @@ -20,9 +20,11 @@ from django.conf import settings from django.core.files.base import File +from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured +from django.utils.six.moves.urllib import parse as urlparse +from django.utils.six import BytesIO -from storages.compat import urlparse, BytesIO, Storage from storages.utils import setting diff --git a/storages/backends/gs.py b/storages/backends/gs.py index 2ee2ccf79..318ab0b84 100644 --- a/storages/backends/gs.py +++ b/storages/backends/gs.py @@ -1,8 +1,8 @@ from django.core.exceptions import ImproperlyConfigured +from django.utils.six import BytesIO from storages.backends.s3boto import S3BotoStorage, S3BotoStorageFile from storages.utils import setting -from storages.compat import BytesIO try: from boto.gs.connection import GSConnection, SubdomainCallingFormat diff --git a/storages/backends/hashpath.py b/storages/backends/hashpath.py index 44343c9ef..252f30fd9 100644 --- a/storages/backends/hashpath.py +++ b/storages/backends/hashpath.py @@ -1,7 +1,9 @@ -import os, hashlib, errno +import errno +import hashlib +import os +from django.core.files.storage import FileSystemStorage from django.utils.encoding import force_text, force_bytes -from storages.compat import FileSystemStorage class HashPathStorage(FileSystemStorage): diff --git a/storages/backends/image.py b/storages/backends/image.py index 637ae8b6b..0be152ad1 100644 --- a/storages/backends/image.py +++ b/storages/backends/image.py @@ -2,7 +2,7 @@ import os from django.core.exceptions import ImproperlyConfigured -from storages.compat import FileSystemStorage +from django.core.files.storage import FileSystemStorage try: from PIL import ImageFile as PILImageFile @@ -14,25 +14,25 @@ class ImageStorage(FileSystemStorage): """ A FileSystemStorage which normalizes extensions for images. 
- + Comes from http://www.djangosnippets.org/snippets/965/ """ - + def find_extension(self, format): """Normalizes PIL-returned format into a standard, lowercase extension.""" format = format.lower() - + if format == 'jpeg': format = 'jpg' - + return format - + def save(self, name, content, max_length=None): dirname = os.path.dirname(name) basename = os.path.basename(name) - + # Use PIL to determine filetype - + p = PILImageFile.Parser() while 1: data = content.read(1024) @@ -42,14 +42,14 @@ def save(self, name, content, max_length=None): if p.image: im = p.image break - + extension = self.find_extension(im.format) - + # Does the basename already have an extension? If so, replace it. # bare as in without extension bare_basename, _ = os.path.splitext(basename) basename = bare_basename + '.' + extension - + name = os.path.join(dirname, basename) return super(ImageStorage, self).save(name, content) - + diff --git a/storages/backends/mogile.py b/storages/backends/mogile.py index 5a31f663a..61d123c58 100644 --- a/storages/backends/mogile.py +++ b/storages/backends/mogile.py @@ -7,8 +7,7 @@ from django.utils.text import force_text from django.http import HttpResponse, HttpResponseNotFound from django.core.exceptions import ImproperlyConfigured - -from storages.compat import urlparse, BytesIO, Storage +from django.core.files.storage import Storage try: import mogilefs @@ -20,37 +19,37 @@ class MogileFSStorage(Storage): """MogileFS filesystem storage""" def __init__(self, base_url=settings.MEDIA_URL): - + # the MOGILEFS_MEDIA_URL overrides MEDIA_URL if hasattr(settings, 'MOGILEFS_MEDIA_URL'): self.base_url = settings.MOGILEFS_MEDIA_URL else: self.base_url = base_url - + for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',): if not hasattr(settings, var): raise ImproperlyConfigured("You must define %s to use the MogileFS backend." % var) - + self.trackers = settings.MOGILEFS_TRACKERS self.domain = settings.MOGILEFS_DOMAIN self.client = mogilefs.Client(self.domain, self.trackers) - + def get_mogile_paths(self, filename): - return self.client.get_paths(filename) - + return self.client.get_paths(filename) + # The following methods define the Backend API def filesize(self, filename): raise NotImplemented #return os.path.getsize(self._get_absolute_path(filename)) - + def path(self, filename): paths = self.get_mogile_paths(filename) if paths: return self.get_mogile_paths(filename)[0] else: return None - + def url(self, filename): return urlparse.urljoin(self.base_url, filename).replace('\\', '/') @@ -63,7 +62,7 @@ def exists(self, filename): def save(self, filename, raw_contents, max_length=None): filename = self.get_available_name(filename, max_length) - + if not hasattr(self, 'mogile_class'): self.mogile_class = None @@ -78,8 +77,8 @@ def save(self, filename, raw_contents, max_length=None): def delete(self, filename): self.client.delete(filename) - - + + def serve_mogilefs_file(request, key=None): """ Called when a user requests an image. 
@@ -90,21 +89,21 @@ def serve_mogilefs_file(request, key=None): client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS) if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL: # we're reproxying with perlbal - + # check the path cache - + path = cache.get(key) if not path: path = client.get_paths(key) cache.set(key, path, 60) - + if path: response = HttpResponse(content_type=mimetype) response['X-REPROXY-URL'] = path[0] else: response = HttpResponseNotFound() - + else: # we don't have perlbal, let's just serve the image via django file_data = client[key] @@ -112,5 +111,5 @@ def serve_mogilefs_file(request, key=None): response = HttpResponse(file_data, mimetype=mimetype) else: response = HttpResponseNotFound() - + return response diff --git a/storages/backends/overwrite.py b/storages/backends/overwrite.py index 84969bddd..f4687dae5 100644 --- a/storages/backends/overwrite.py +++ b/storages/backends/overwrite.py @@ -1,4 +1,4 @@ -from storages.compat import FileSystemStorage +from django.core.files.storage import FileSystemStorage class OverwriteStorage(FileSystemStorage): diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index e5dc1fbd2..47d3ffa6d 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -6,8 +6,12 @@ from tempfile import SpooledTemporaryFile from django.core.files.base import File +from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation +from django.utils.deconstruct import deconstructible from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes +from django.utils.six import BytesIO +from django.utils.six.moves.urllib import parse as urlparse try: from boto import __version__ as boto_version @@ -20,7 +24,6 @@ "See https://github.com/boto/boto") from storages.utils import setting -from storages.compat import urlparse, BytesIO, deconstructible, Storage boto_version_info = tuple([int(i) for i in boto_version.split('-')[0].split('.')]) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index e6bf08ceb..9f06d122f 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -4,9 +4,13 @@ from gzip import GzipFile from tempfile import SpooledTemporaryFile -from django.core.files.base import File from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation +from django.core.files.base import File +from django.core.files.storage import Storage +from django.utils.deconstruct import deconstructible from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes +from django.utils.six.moves.urllib import parse as urlparse +from django.utils.six import BytesIO from django.utils.timezone import localtime try: @@ -19,7 +23,6 @@ "See https://github.com/boto/boto3") from storages.utils import setting -from storages.compat import urlparse, BytesIO, deconstructible, Storage boto3_version_info = tuple([int(i) for i in boto3_version.split('-')[0].split('.')]) diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py index f219b5950..c39425ded 100644 --- a/storages/backends/sftpstorage.py +++ b/storages/backends/sftpstorage.py @@ -14,8 +14,10 @@ from django.conf import settings from django.core.files.base import File +from django.core.files.storage import Storage +from django.utils.six import BytesIO +from django.utils.six.moves.urllib import parse as urlparse -from storages.compat import urlparse, BytesIO, Storage 
from storages.utils import setting diff --git a/storages/backends/symlinkorcopy.py b/storages/backends/symlinkorcopy.py index 881042358..9216a2db4 100644 --- a/storages/backends/symlinkorcopy.py +++ b/storages/backends/symlinkorcopy.py @@ -1,7 +1,7 @@ import os from django.conf import settings -from storages.compat import FileSystemStorage +from django.core.files.storage import FileSystemStorage __doc__ = """ I needed to efficiently create a mirror of a directory tree (so that diff --git a/storages/compat.py b/storages/compat.py deleted file mode 100644 index 1ac3e1d0a..000000000 --- a/storages/compat.py +++ /dev/null @@ -1,28 +0,0 @@ -from django.utils.six.moves.urllib import parse as urlparse -from django.utils.six import BytesIO -import django - -try: - from django.utils.deconstruct import deconstructible -except ImportError: # Django 1.7+ migrations - deconstructible = lambda klass, *args, **kwargs: klass - -# Storage only accepts `max_length` in 1.8+ -if django.VERSION >= (1, 8): - from django.core.files.storage import Storage, FileSystemStorage -else: - from django.core.files.storage import Storage as DjangoStorage - from django.core.files.storage import FileSystemStorage as DjangoFileSystemStorage - - class StorageMixin(object): - def save(self, name, content, max_length=None): - return super(StorageMixin, self).save(name, content) - - def get_available_name(self, name, max_length=None): - return super(StorageMixin, self).get_available_name(name) - - class Storage(StorageMixin, DjangoStorage): - pass - - class FileSystemStorage(StorageMixin, DjangoFileSystemStorage): - pass diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index a56598f21..e19ed92b9 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -1,4 +1,3 @@ -import unittest try: from unittest import mock except ImportError: # Python 3.2 and below @@ -8,13 +7,12 @@ from django.test import TestCase from django.core.files.base import ContentFile -import django +from django.utils.six.moves.urllib import parse as urlparse from boto.exception import S3ResponseError from boto.s3.key import Key from boto.utils import parse_ts, ISO8601 -from storages.compat import urlparse from storages.backends import s3boto __all__ = ( @@ -329,9 +327,3 @@ def test_new_file_modified_time(self): self.storage.save(name, content) self.assertEqual(self.storage.modified_time(name), parse_ts(utcnow.strftime(ISO8601))) - - @unittest.skipIf(django.VERSION >= (1, 8), 'Only test backward compat of max_length for versions before 1.8') - def test_max_length_compat_okay(self): - self.storage.file_overwrite = False - self.storage.exists = lambda name: False - self.storage.get_available_name('gogogo', max_length=255) diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 5e3370a2e..fbfdfdeae 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -1,19 +1,17 @@ +from datetime import datetime import gzip -import unittest try: from unittest import mock except ImportError: # Python 3.2 and below import mock -from datetime import datetime, timedelta, tzinfo from django.test import TestCase from django.core.files.base import ContentFile +from django.utils.six.moves.urllib import parse as urlparse from django.utils.timezone import is_aware, utc -import django from botocore.exceptions import ClientError -from storages.compat import urlparse from storages.backends import s3boto3 __all__ = ( @@ -315,13 +313,6 @@ def test_generated_url_is_encoded(self): "/whacky%20%26%20filename.mp4") 
self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) - @unittest.skipIf(django.VERSION >= (1, 8), - 'Only test backward compat of max_length for versions before 1.8') - def test_max_length_compat_okay(self): - self.storage.file_overwrite = False - self.storage.exists = lambda name: False - self.storage.get_available_name('gogogo', max_length=255) - def test_strip_signing_parameters(self): expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' self.assertEquals(self.storage._strip_signing_parameters( diff --git a/tox.ini b/tox.ini index 4eac06886..9c1a1e70f 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,8 @@ [tox] envlist = - {py27,py33,py34}-django17, {py27,py33,py34,py35}-django18, {py27,py34,py35}-django19 + {py27,py34,py35}-django110 [testenv] @@ -11,9 +11,9 @@ setenv = PYTHONDONTWRITEBYTECODE=1 DJANGO_SETTINGS_MODULE=tests.settings deps = - django17: Django>=1.7, <1.8 django18: Django>=1.8, <1.9 django19: Django>=1.9, <1.10 + django110: Django>=1.10, <1.11 py27: mock==1.0.1 boto>=2.32.0 pytest-cov==2.2.1 From eca46e509843f39137843441c33f1c51812a909c Mon Sep 17 00:00:00 2001 From: a12k Date: Thu, 14 Apr 2016 17:03:56 -0500 Subject: [PATCH 020/174] Update amazon-S3.rst --- docs/backends/amazon-S3.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index ee6d39fb8..836f3137c 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -25,6 +25,10 @@ Your Amazon Web Services secret access key, as a string. Your Amazon Web Services storage bucket name, as a string. +``AWS_DEFAULT_ACL`` (optional) + +If set to ``private`` changes uploaded file's Access Control List from the default permission ``public-read`` to give owner full control and remove read access from everyone else. + ``AWS_AUTO_CREATE_BUCKET`` (optional) If set to ``True`` the bucket specified in ``AWS_STORAGE_BUCKET_NAME`` is automatically created. 
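+
+For example, a minimal ``settings.py`` for private uploads might look like
+the following (the bucket name is a placeholder)::
+
+    AWS_STORAGE_BUCKET_NAME = 'my-example-bucket'
+    AWS_AUTO_CREATE_BUCKET = True
+    AWS_DEFAULT_ACL = 'private'
+    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'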
From 8ac7994028a8b8ecdca33c42a7635a63fffe0737 Mon Sep 17 00:00:00 2001 From: Leon Smith Date: Thu, 18 Aug 2016 16:05:14 +0100 Subject: [PATCH 021/174] Fix #189 - preload_metadata option on s3boto3 (#190) Looks like boto3 changed some keyword arguments and this prefix forgot to be captalised Signed-off-by: Leon Smith <_@leonmarksmith.com> --- storages/backends/s3boto3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 9f06d122f..0124c65b8 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -313,7 +313,7 @@ def entries(self): """ if self.preload_metadata and not self._entries: self._entries = dict((self._decode_name(entry.key), entry) - for entry in self.bucket.objects.filter(prefix=self.location)) + for entry in self.bucket.objects.filter(Prefix=self.location)) return self._entries def _get_access_keys(self): From 78ec969641e9e2fde1746ab66bc58321ba9389b6 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 18 Aug 2016 15:30:11 -0400 Subject: [PATCH 022/174] Update CHANGELOG --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b2a1b0b0c..43fd8a4b8 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,7 +6,10 @@ django-storages change log * **Drop support for Django 1.7** (`#185`_) * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright +* Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_ `#190`_) thanks @leonsmith +.. _#190: https://github.com/jschneier/django-storages/pull/190 +.. _#189: https://github.com/jschneier/django-storages/issues/189 .. _#185: https://github.com/jschneier/django-storages/pull/185 .. _#181: https://github.com/jschneier/django-storages/pull/181 From 4f451442c84af412528da100d9d02359866e844b Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sun, 21 Aug 2016 16:58:52 -0700 Subject: [PATCH 023/174] Document supported versions of Django in setup.py (#192) Alphabetized entries. 
--- setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6170482b5..4c28d115e 100644 --- a/setup.py +++ b/setup.py @@ -22,9 +22,12 @@ def get_requirements_tests(): long_description=read('README.rst') + '\n\n' + read('CHANGELOG.rst'), url='https://github.com/jschneier/django-storages', classifiers=[ - 'Framework :: Django', 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', + 'Framework :: Django', + 'Framework :: Django :: 1.8', + 'Framework :: Django :: 1.9', + 'Framework :: Django :: 1.10', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', From 5f280571ee1ae93ee66ed805b53b08bfe5ab9f0c Mon Sep 17 00:00:00 2001 From: Michael Nacharov Date: Fri, 26 Aug 2016 22:43:36 +0600 Subject: [PATCH 024/174] All storage classes are deconstructible (#193) --- storages/backends/azure_storage.py | 2 ++ storages/backends/couchdb.py | 2 ++ storages/backends/database.py | 2 ++ storages/backends/dropbox.py | 2 ++ storages/backends/ftp.py | 2 ++ storages/backends/gs.py | 2 ++ storages/backends/hashpath.py | 2 ++ storages/backends/image.py | 2 ++ storages/backends/mogile.py | 2 ++ storages/backends/overwrite.py | 2 ++ storages/backends/sftpstorage.py | 2 ++ storages/backends/symlinkorcopy.py | 2 ++ 12 files changed, 24 insertions(+) diff --git a/storages/backends/azure_storage.py b/storages/backends/azure_storage.py index 52328b713..19494c4b2 100644 --- a/storages/backends/azure_storage.py +++ b/storages/backends/azure_storage.py @@ -7,6 +7,7 @@ from django.core.files.base import ContentFile from django.core.exceptions import ImproperlyConfigured from django.core.files.storage import Storage +from django.utils.deconstruct import deconstructible try: import azure # noqa @@ -30,6 +31,7 @@ def clean_name(name): return os.path.normpath(name).replace("\\", "/") +@deconstructible class AzureStorage(Storage): account_name = setting("AZURE_ACCOUNT_NAME") account_key = setting("AZURE_ACCOUNT_KEY") diff --git a/storages/backends/couchdb.py b/storages/backends/couchdb.py index 1dcfadee9..e0216009d 100644 --- a/storages/backends/couchdb.py +++ b/storages/backends/couchdb.py @@ -9,6 +9,7 @@ from django.core.files import File from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured +from django.utils.deconstruct import deconstructible from django.utils.six.moves.urllib import parse as urlparse from django.utils.six import BytesIO @@ -22,6 +23,7 @@ STORAGE_OPTIONS = getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {}) +@deconstructible class CouchDBStorage(Storage): """ CouchDBStorage - a Django Storage class for CouchDB. diff --git a/storages/backends/database.py b/storages/backends/database.py index 114bdcf11..0f4669a81 100644 --- a/storages/backends/database.py +++ b/storages/backends/database.py @@ -5,6 +5,7 @@ from django.core.files import File from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured +from django.utils.deconstruct import deconstructible from django.utils.six import BytesIO from django.utils.six.moves.urllib import parse as urlparse @@ -17,6 +18,7 @@ REQUIRED_FIELDS = ('db_table', 'fname_column', 'blob_column', 'size_column', 'base_url') +@deconstructible class DatabaseStorage(Storage): """ Class DatabaseStorage provides storing files in the database. 
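For context on this patch: Django migrations serialize any storage instance
passed to a FileField by calling its deconstruct() method, which the
@deconstructible decorator generates. A minimal sketch of what the decorator
enables (the model and field names are hypothetical):

    from django.db import models
    from storages.backends.overwrite import OverwriteStorage

    class Report(models.Model):
        # With @deconstructible applied, makemigrations can serialize this
        # storage instance into the generated migration file.
        pdf = models.FileField(upload_to='reports/', storage=OverwriteStorage())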
diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index 040858f89..c76ba2718 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -17,6 +17,7 @@ from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.core.files.storage import Storage +from django.utils.deconstruct import deconstructible from django.utils._os import safe_join from storages.utils import setting @@ -46,6 +47,7 @@ def file(self): return self._file +@deconstructible class DropBoxStorage(Storage): """DropBox Storage class for Django pluggable storage system.""" diff --git a/storages/backends/ftp.py b/storages/backends/ftp.py index c49f9c45e..bb705a18c 100644 --- a/storages/backends/ftp.py +++ b/storages/backends/ftp.py @@ -22,6 +22,7 @@ from django.core.files.base import File from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured +from django.utils.deconstruct import deconstructible from django.utils.six.moves.urllib import parse as urlparse from django.utils.six import BytesIO @@ -32,6 +33,7 @@ class FTPStorageException(Exception): pass +@deconstructible class FTPStorage(Storage): """FTP Storage class for Django pluggable storage system.""" diff --git a/storages/backends/gs.py b/storages/backends/gs.py index 318ab0b84..1425ebe2e 100644 --- a/storages/backends/gs.py +++ b/storages/backends/gs.py @@ -1,4 +1,5 @@ from django.core.exceptions import ImproperlyConfigured +from django.utils.deconstruct import deconstructible from django.utils.six import BytesIO from storages.backends.s3boto import S3BotoStorage, S3BotoStorageFile @@ -30,6 +31,7 @@ def close(self): self.key.close() +@deconstructible class GSBotoStorage(S3BotoStorage): connection_class = GSConnection connection_response_error = GSResponseError diff --git a/storages/backends/hashpath.py b/storages/backends/hashpath.py index 252f30fd9..706248522 100644 --- a/storages/backends/hashpath.py +++ b/storages/backends/hashpath.py @@ -3,9 +3,11 @@ import os from django.core.files.storage import FileSystemStorage +from django.utils.deconstruct import deconstructible from django.utils.encoding import force_text, force_bytes +@deconstructible class HashPathStorage(FileSystemStorage): """ Creates a hash from the uploaded file to build the path. diff --git a/storages/backends/image.py b/storages/backends/image.py index 0be152ad1..5c76a8f6d 100644 --- a/storages/backends/image.py +++ b/storages/backends/image.py @@ -3,6 +3,7 @@ from django.core.exceptions import ImproperlyConfigured from django.core.files.storage import FileSystemStorage +from django.utils.deconstruct import deconstructible try: from PIL import ImageFile as PILImageFile @@ -11,6 +12,7 @@ \nSee http://www.pythonware.com/products/pil/") +@deconstructible class ImageStorage(FileSystemStorage): """ A FileSystemStorage which normalizes extensions for images. 
diff --git a/storages/backends/mogile.py b/storages/backends/mogile.py index 61d123c58..e1d706b85 100644 --- a/storages/backends/mogile.py +++ b/storages/backends/mogile.py @@ -4,6 +4,7 @@ from django.conf import settings from django.core.cache import cache +from django.utils.deconstruct import deconstructible from django.utils.text import force_text from django.http import HttpResponse, HttpResponseNotFound from django.core.exceptions import ImproperlyConfigured @@ -16,6 +17,7 @@ \nSee http://mogilefs.pbworks.com/Client-Libraries") +@deconstructible class MogileFSStorage(Storage): """MogileFS filesystem storage""" def __init__(self, base_url=settings.MEDIA_URL): diff --git a/storages/backends/overwrite.py b/storages/backends/overwrite.py index f4687dae5..64f573bf8 100644 --- a/storages/backends/overwrite.py +++ b/storages/backends/overwrite.py @@ -1,6 +1,8 @@ from django.core.files.storage import FileSystemStorage +from django.utils.deconstruct import deconstructible +@deconstructible class OverwriteStorage(FileSystemStorage): """ Comes from http://www.djangosnippets.org/snippets/976/ diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py index c39425ded..42539b895 100644 --- a/storages/backends/sftpstorage.py +++ b/storages/backends/sftpstorage.py @@ -15,12 +15,14 @@ from django.conf import settings from django.core.files.base import File from django.core.files.storage import Storage +from django.utils.deconstruct import deconstructible from django.utils.six import BytesIO from django.utils.six.moves.urllib import parse as urlparse from storages.utils import setting +@deconstructible class SFTPStorage(Storage): def __init__(self, host, params=None, interactive=None, file_mode=None, diff --git a/storages/backends/symlinkorcopy.py b/storages/backends/symlinkorcopy.py index 9216a2db4..6432190f8 100644 --- a/storages/backends/symlinkorcopy.py +++ b/storages/backends/symlinkorcopy.py @@ -2,6 +2,7 @@ from django.conf import settings from django.core.files.storage import FileSystemStorage +from django.utils.deconstruct import deconstructible __doc__ = """ I needed to efficiently create a mirror of a directory tree (so that @@ -26,6 +27,7 @@ """ +@deconstructible class SymlinkOrCopyStorage(FileSystemStorage): """Stores symlinks to files instead of actual files whenever possible From b334a99438f692104b328936f98cce684834e1e2 Mon Sep 17 00:00:00 2001 From: Benjamin Toueg Date: Wed, 14 Sep 2016 01:38:22 +0200 Subject: [PATCH 025/174] Add support for files > 5 gb with S3BotoStorage (#201) * Add support for files > 5 gb with S3BotoStorage Fixes https://github.com/jschneier/django-storages/issues/194 * Fix tests --- storages/backends/s3boto3.py | 2 +- tests/test_s3boto3.py | 27 ++++++++++++++++----------- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 0124c65b8..505850b0c 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -462,7 +462,7 @@ def _save_content(self, obj, content, parameters): if self.default_acl: put_parameters['ACL'] = self.default_acl content.seek(0, os.SEEK_SET) - obj.put(Body=content, **put_parameters) + obj.upload_fileobj(content, ExtraArgs=put_parameters) def delete(self, name): name = self._normalize_name(self._clean_name(name)) diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index fbfdfdeae..ddedcf847 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -118,10 +118,12 @@ def test_storage_save(self): 
self.storage.bucket.Object.assert_called_once_with(name) obj = self.storage.bucket.Object.return_value - obj.put.assert_called_with( - Body=content, - ContentType='text/plain', - ACL=self.storage.default_acl, + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'text/plain', + 'ACL': self.storage.default_acl, + } ) def test_storage_save_gzip(self): @@ -133,14 +135,17 @@ def test_storage_save_gzip(self): content = ContentFile("I should be gzip'd") self.storage.save(name, content) obj = self.storage.bucket.Object.return_value - obj.put.assert_called_with( - Body=mock.ANY, - ContentType='text/css', - ContentEncoding='gzip', - ACL=self.storage.default_acl + obj.upload_fileobj.assert_called_with( + mock.ANY, + ExtraArgs={ + 'ContentType': 'text/css', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } ) - body = obj.put.call_args[1]['Body'] - zfile = gzip.GzipFile(mode='rb', fileobj=body) + args, kwargs = obj.upload_fileobj.call_args + content = args[0] + zfile = gzip.GzipFile(mode='rb', fileobj=content) self.assertEquals(zfile.read(), b"I should be gzip'd") def test_compress_content_len(self): From 217231db73ced87e335959468bb38b188af68894 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 13 Sep 2016 19:42:40 -0400 Subject: [PATCH 026/174] CHANGELOG.rst for #194 and #201 --- CHANGELOG.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 43fd8a4b8..5fdb71808 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,9 +5,13 @@ django-storages change log ****************** * **Drop support for Django 1.7** (`#185`_) +* Use ``.putfileobj`` instead of ``.put`` in ``S3Boto3Storage`` to use the transfer manager, + allowing files greater than 5GB to be put on S3 (`#194`_ , `#201`_) * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright -* Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_ `#190`_) thanks @leonsmith +* Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_, `#190`_) thanks @leonsmith +.. _#201: https://github.com/jschneier/django-storages/pull/201 +.. _#194: https://github.com/jschneier/django-storages/issues/194 .. _#190: https://github.com/jschneier/django-storages/pull/190 .. _#189: https://github.com/jschneier/django-storages/issues/189 .. 
_#185: https://github.com/jschneier/django-storages/pull/185 From 9c3fd230c9a2fe4e73592ee7b5a2e4759d6210f7 Mon Sep 17 00:00:00 2001 From: millarm Date: Wed, 14 Sep 2016 01:14:53 +0100 Subject: [PATCH 027/174] Freeze modification time when gzipping files to S3 (#120) to allow detection of changes in gzipped files by MD5 SUM in the same way as for non gzipped files --- storages/backends/s3boto.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 47d3ffa6d..7dcc3c280 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -372,7 +372,11 @@ def _decode_name(self, name): def _compress_content(self, content): """Gzip a given string content.""" zbuf = BytesIO() - zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf) + # The GZIP header has a modification time attribute (see http://www.zlib.org/rfc-gzip.html) + # This means each time a file is compressed it changes even if the other contents don't change + # For S3 this defeats detection of changes using MD5 sums on gzipped files + # Fixing the mtime at 0.0 at compression time avoids this problem + zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0.0) try: zfile.write(force_bytes(content.read())) finally: From 9963c5d9be206f4c989bc7b40f691d63a0e87f67 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 13 Sep 2016 20:16:51 -0400 Subject: [PATCH 028/174] Use a constant for mtime in GzipFile for stable writes --- storages/backends/s3boto3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 505850b0c..722a3a872 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -123,7 +123,7 @@ def _get_file(self): self._file.write(self.obj.get()['Body'].read()) self._file.seek(0) if self._storage.gzip and self.obj.content_encoding == 'gzip': - self._file = GzipFile(mode=self._mode, fileobj=self._file) + self._file = GzipFile(mode=self._mode, fileobj=self._file, mtime=0.0) return self._file def _set_file(self, value): From 48d0e672c03602cf92f5d7427035a2ec9b51dac6 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 13 Sep 2016 20:22:05 -0400 Subject: [PATCH 029/174] CHANGELOG for #120 --- CHANGELOG.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5fdb71808..b58bfdb29 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ django-storages change log ****************** * **Drop support for Django 1.7** (`#185`_) +* Use a fixed ``mtime`` argument for ``GzipFile`` in ``S3BotoStorage`` and ``S3Boto3Storage`` to ensure + a stable output for gzipped files * Use ``.putfileobj`` instead of ``.put`` in ``S3Boto3Storage`` to use the transfer manager, allowing files greater than 5GB to be put on S3 (`#194`_ , `#201`_) * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright From 25f655463d01796f3ba0c93b3c1021f99bde3129 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 13 Sep 2016 21:28:23 -0400 Subject: [PATCH 030/174] Deprecate a whole bunch of unmaintained backends --- CHANGELOG.rst | 3 +++ setup.py | 2 +- storages/backends/couchdb.py | 8 ++++++++ storages/backends/database.py | 6 ++++++ storages/backends/hashpath.py | 7 +++++++ storages/backends/image.py | 9 ++++++++- storages/backends/mogile.py | 7 +++++++ storages/backends/overwrite.py | 8 ++++++++ storages/backends/symlinkorcopy.py | 6 ++++++ 9 files changed, 54 
insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index b58bfdb29..db74652c6 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -5,6 +5,8 @@ django-storages change log
 ******************
 * **Drop support for Django 1.7** (`#185`_)
+* **Deprecate hashpath, image, overwrite, mogile, symlinkorcopy, database, couchdb.**
+  See (`issue #202`_) to discuss maintenance going forward.
 * Use a fixed ``mtime`` argument for ``GzipFile`` in ``S3BotoStorage`` and ``S3Boto3Storage`` to ensure
   a stable output for gzipped files
 * Use ``.putfileobj`` instead of ``.put`` in ``S3Boto3Storage`` to use the transfer manager,
@@ -12,6 +14,7 @@ django-storages change log
 * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright
 * Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_, `#190`_) thanks @leonsmith
+.. _issue #202: https://github.com/jschneier/django-storages/issues/202
 .. _#201: https://github.com/jschneier/django-storages/pull/201
 .. _#194: https://github.com/jschneier/django-storages/issues/194
 .. _#190: https://github.com/jschneier/django-storages/pull/190

diff --git a/setup.py b/setup.py
index 4c28d115e..a1bf39b0b 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ def get_requirements_tests():
 author='Josh Schneier',
 author_email='josh.schneier@gmail.com',
 license='BSD',
- description='Support for many storages (S3, MogileFS, etc) in Django.',
+ description='Support for many storages (S3, Libcloud, etc) in Django.',
 long_description=read('README.rst') + '\n\n' + read('CHANGELOG.rst'),
 url='https://github.com/jschneier/django-storages',
 classifiers=[

diff --git a/storages/backends/couchdb.py b/storages/backends/couchdb.py
index e0216009d..16ef41e45 100644
--- a/storages/backends/couchdb.py
+++ b/storages/backends/couchdb.py
@@ -4,6 +4,7 @@
 (c) Copyright 2009 HUDORA GmbH. All Rights Reserved.
 """
 import os
+import warnings

 from django.conf import settings
 from django.core.files import File
@@ -23,6 +24,13 @@
 STORAGE_OPTIONS = getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {})

+warnings.warn(
+ 'CouchDBStorage is unmaintained and will be removed in the next version of django-storages. '
+ 'See https://github.com/jschneier/django-storages/issues/202',
+ PendingDeprecationWarning
+)
+
+
 @deconstructible
 class CouchDBStorage(Storage):
 """

diff --git a/storages/backends/database.py b/storages/backends/database.py
index 0f4669a81..81954fc55 100644
--- a/storages/backends/database.py
+++ b/storages/backends/database.py
@@ -1,5 +1,6 @@
 # DatabaseStorage for django.
 # 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
+import warnings

 from django.conf import settings
 from django.core.files import File
@@ -16,6 +17,11 @@
 \nSee https://github.com/mkleehammer/pyodbc")

 REQUIRED_FIELDS = ('db_table', 'fname_column', 'blob_column', 'size_column', 'base_url')
+warnings.warn(
+ 'DatabaseStorage is unmaintained and will be removed in the next version of django-storages. '
+ 'See https://github.com/jschneier/django-storages/issues/202',
+ PendingDeprecationWarning
+)


 @deconstructible

diff --git a/storages/backends/hashpath.py b/storages/backends/hashpath.py
index 706248522..c161cfc84 100644
--- a/storages/backends/hashpath.py
+++ b/storages/backends/hashpath.py
@@ -1,11 +1,18 @@
 import errno
 import hashlib
 import os
+import warnings

 from django.core.files.storage import FileSystemStorage
 from django.utils.deconstruct import deconstructible
 from django.utils.encoding import force_text, force_bytes

+warnings.warn(
+ 'HashPathStorage is unmaintained and will be removed in the next version of django-storages. '
+ 'See https://github.com/jschneier/django-storages/issues/202',
+ PendingDeprecationWarning
+)
+

 @deconstructible
 class HashPathStorage(FileSystemStorage):

diff --git a/storages/backends/image.py b/storages/backends/image.py
index 5c76a8f6d..22c93a850 100644
--- a/storages/backends/image.py
+++ b/storages/backends/image.py
@@ -1,5 +1,5 @@
-
 import os
+import warnings

 from django.core.exceptions import ImproperlyConfigured
 from django.core.files.storage import FileSystemStorage
@@ -12,6 +12,13 @@
 \nSee http://www.pythonware.com/products/pil/")

+warnings.warn(
+ 'ImageStorage backend is unmaintained and will be removed in the next django-storages version. '
+ 'See https://github.com/jschneier/django-storages/issues/202',
+ PendingDeprecationWarning
+)
+
+
 @deconstructible
 class ImageStorage(FileSystemStorage):
 """

diff --git a/storages/backends/mogile.py b/storages/backends/mogile.py
index e1d706b85..d61941943 100644
--- a/storages/backends/mogile.py
+++ b/storages/backends/mogile.py
@@ -1,6 +1,7 @@
 from __future__ import print_function

 import mimetypes
+import warnings

 from django.conf import settings
 from django.core.cache import cache
@@ -16,6 +17,12 @@
 raise ImproperlyConfigured("Could not load mogilefs dependency.\
 \nSee http://mogilefs.pbworks.com/Client-Libraries")

+warnings.warn(
+ 'MogileFSStorage is unmaintained and will be removed in the next django-storages version. '
+ 'See https://github.com/jschneier/django-storages/issues/202',
+ PendingDeprecationWarning
+)
+

 @deconstructible
 class MogileFSStorage(Storage):

diff --git a/storages/backends/overwrite.py b/storages/backends/overwrite.py
index 64f573bf8..0a55059c1 100644
--- a/storages/backends/overwrite.py
+++ b/storages/backends/overwrite.py
@@ -1,6 +1,14 @@
+import warnings
+
 from django.core.files.storage import FileSystemStorage
 from django.utils.deconstruct import deconstructible

+warnings.warn(
+ 'OverwriteStorage is unmaintained and will be removed in the next django-storages version. '
+ 'See https://github.com/jschneier/django-storages/issues/202',
+ PendingDeprecationWarning
+)
+

 @deconstructible
 class OverwriteStorage(FileSystemStorage):

diff --git a/storages/backends/symlinkorcopy.py b/storages/backends/symlinkorcopy.py
index 6432190f8..e5b6e7ef3 100644
--- a/storages/backends/symlinkorcopy.py
+++ b/storages/backends/symlinkorcopy.py
@@ -1,4 +1,5 @@
 import os
+import warnings

 from django.conf import settings
 from django.core.files.storage import FileSystemStorage
@@ -25,6 +26,11 @@
 e.g. /htdocs/example.com/image.jpg and modified files will be stored in a
 temporary directory, e.g. /tmp/image.jpg.
 """
+warnings.warn(
+ 'SymlinkOrCopyStorage is unmaintained and will be removed in the next django-storages version. '
+ 'See https://github.com/jschneier/django-storages/issues/202', + PendingDeprecationWarning +) @deconstructible From 94483448ca405585758f5f5cf5b1e0ec37c31ad1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 13 Sep 2016 21:29:13 -0400 Subject: [PATCH 031/174] Release version 1.5.1 --- CHANGELOG.rst | 2 +- storages/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index db74652c6..c784c61cd 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ django-storages change log ========================== -1.5.1 (XXXX-XX-XX) +1.5.1 (2016-09-13) ****************** * **Drop support for Django 1.7** (`#185`_) diff --git a/storages/__init__.py b/storages/__init__.py index 77f1c8e63..51ed7c486 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.5.0' +__version__ = '1.5.1' From ac1e1a5900cdd735b79fe33ef3b22e60bf5f8b8e Mon Sep 17 00:00:00 2001 From: Jason Bittel Date: Fri, 16 Sep 2016 08:31:36 -0700 Subject: [PATCH 032/174] Fix host access in SFTPStorage --- storages/backends/sftpstorage.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py index 42539b895..6efdf1234 100644 --- a/storages/backends/sftpstorage.py +++ b/storages/backends/sftpstorage.py @@ -12,7 +12,6 @@ import stat from datetime import datetime -from django.conf import settings from django.core.files.base import File from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible @@ -25,10 +24,10 @@ @deconstructible class SFTPStorage(Storage): - def __init__(self, host, params=None, interactive=None, file_mode=None, + def __init__(self, host=None, params=None, interactive=None, file_mode=None, dir_mode=None, uid=None, gid=None, known_host_file=None, root_path=None, base_url=None): - self._host = host or settings('SFTP_STORAGE_HOST') + self._host = host or setting('SFTP_STORAGE_HOST') self._params = params or setting('SFTP_STORAGE_PARAMS', {}) self._interactive = setting('SFTP_STORAGE_INTERACTIVE', False) \ From 40503aba6bbaa7175fa838e331fb9aa4d4440344 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Mon, 26 Sep 2016 14:53:42 -0400 Subject: [PATCH 033/174] Add CHANGELOG for #204 --- CHANGELOG.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c784c61cd..369b1a98b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,13 @@ django-storages change log ========================== +1.5.2 (XXXX-XX-XX) +****************** + +* Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_ thanks @jbittel) + +.. _#204: https://github.com/jschneier/django-storages/pull/204 + 1.5.1 (2016-09-13) ****************** From 2e23b72b4e9c4b4319204155d834e95c8fd73bcf Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 28 Sep 2016 19:14:37 -0400 Subject: [PATCH 034/174] Document AWS_S3_FILE_OVERWRITE --- docs/backends/amazon-S3.rst | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 836f3137c..f7e87b4fd 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -5,6 +5,7 @@ Usage ***** There is one backend for interacting with S3 based on the boto library. A legacy backend backed on the Amazon S3 Python library was removed in version 1.2. 
+Another for interacting via Boto3 was added in version 1.5.

 Settings
 --------
@@ -63,10 +64,15 @@ To allow ``django-admin.py`` collectstatic to automatically put your static file
 .. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html

-``AWS_S3_ENCRYPTION`` (optional; default is False)
+``AWS_S3_ENCRYPTION`` (optional; default is ``False``)

 Enable server-side file encryption while at rest, by setting ``encrypt_key`` parameter to True. More info available here: http://boto.cloudhackers.com/en/latest/ref/s3.html

+``AWS_S3_FILE_OVERWRITE`` (optional; default is ``True``)
+
+By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended.
+
+
 Fields
 ------

From 65c9f72d51d6aa540e9089979f179db78dd9c13b Mon Sep 17 00:00:00 2001
From: Hugo Osvaldo Barrera
Date: Wed, 25 Nov 2015 05:30:26 -0300
Subject: [PATCH 035/174] Document how to use CloudFront

---
 docs/backends/amazon-S3.rst | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst
index f7e87b4fd..cbc372b2d 100644
--- a/docs/backends/amazon-S3.rst
+++ b/docs/backends/amazon-S3.rst
@@ -72,6 +72,19 @@ Enable server-side file encryption while at rest, by setting ``encrypt_key`` par
 By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended.

+CloudFront
+~~~~~~~~~~
+
+If you're using S3 as a CDN (via CloudFront), you'll probably want this storage
+to serve those files using that domain::
+
+ AWS_S3_CUSTOM_DOMAIN = 'cdn.mydomain.com'
+
+Keep in mind you'll have to configure CloudFront to use the proper bucket as an
+origin manually for this to work.
+
+If you need to use multiple storages that are served via CloudFront, pass the
+`custom_domain` parameter to their constructors.

 Fields
 ------

From ba497d4a535691f6bc23bd71c1655cfa3c23e7a1 Mon Sep 17 00:00:00 2001
From: Victor Haffreingue
Date: Fri, 30 Sep 2016 00:40:38 +0100
Subject: [PATCH 036/174] Add location to the create_bucket parameters (#206)

* Add location to the create_bucket parameters

* Use origin instead of location

The location in s3boto is the beginning of the path to the file, and not the bucket location. Use a new config variable AWS_ORIGIN and its corresponding origin value instead.
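The new setting feeds straight into boto's ``create_bucket``, whose ``location`` argument decides the region a bucket is created in. A sketch of the call the backend ends up making, with illustrative placeholder credentials and bucket name::

    from boto.s3.connection import Location, S3Connection

    conn = S3Connection('<access-key>', '<secret-key>')
    # With AWS_ORIGIN = Location.EU the bucket is created in the EU region
    # rather than boto's default US Standard (Location.DEFAULT, i.e. '').
    bucket = conn.create_bucket('my-bucket', location=Location.EU)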
* Use the boto location as the fallback origin Default to '' --- storages/backends/s3boto.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 7dcc3c280..958774ea3 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -15,7 +15,7 @@ try: from boto import __version__ as boto_version - from boto.s3.connection import S3Connection, SubdomainCallingFormat + from boto.s3.connection import S3Connection, SubdomainCallingFormat, Location from boto.exception import S3ResponseError from boto.s3.key import Key as S3Key from boto.utils import parse_ts, ISO8601 @@ -218,6 +218,7 @@ class S3BotoStorage(Storage): querystring_expire = setting('AWS_QUERYSTRING_EXPIRE', 3600) reduced_redundancy = setting('AWS_REDUCED_REDUNDANCY', False) location = setting('AWS_LOCATION', '') + origin = setting('AWS_ORIGIN', Location.DEFAULT) encryption = setting('AWS_S3_ENCRYPTION', False) custom_domain = setting('AWS_S3_CUSTOM_DOMAIN') calling_format = setting('AWS_S3_CALLING_FORMAT', SubdomainCallingFormat()) @@ -328,7 +329,7 @@ def _get_or_create_bucket(self, name): return self.connection.get_bucket(name, validate=self.auto_create_bucket) except self.connection_response_error: if self.auto_create_bucket: - bucket = self.connection.create_bucket(name) + bucket = self.connection.create_bucket(name, location=self.origin) bucket.set_acl(self.bucket_acl) return bucket raise ImproperlyConfigured("Bucket %s does not exist. Buckets " From 37ff23e0644606be5b195577e83ff5178fb24f35 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Tue, 4 Oct 2016 11:14:37 -0400 Subject: [PATCH 037/174] Fix supported Django versions in README (#210) --- README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 2b16bdecb..39b5fc582 100644 --- a/README.rst +++ b/README.rst @@ -33,7 +33,9 @@ please consult the documentation for a comprehensive list. About ===== django-storages is a project to provide a variety of storage backends in a single library. -This library is compatible with Django >= 1.7. It should also works with 1.6.2+ but no guarantees are made. + +This library is usually compatible with the currently supported versions of +Django. Check the trove classifiers in setup.py to be sure. History ======= From 4c578a0dc85b1699035da3b2f281da2983159b33 Mon Sep 17 00:00:00 2001 From: Alec Brunelle Date: Mon, 17 Oct 2016 14:13:28 -0400 Subject: [PATCH 038/174] Add necessary steps for azure backend installation --- docs/backends/azure.rst | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/backends/azure.rst b/docs/backends/azure.rst index 6433b17b1..54378db90 100644 --- a/docs/backends/azure.rst +++ b/docs/backends/azure.rst @@ -3,6 +3,16 @@ Azure Storage A custom storage system for Django using Windows Azure Storage backend. +Before you start configuration, you will need to install the Azure SDK for Python. 
+ +Install the package:: + + pip install azure + +Add to your requirements file:: + + pip freeze > requirements.txt + Settings ******* @@ -17,7 +27,7 @@ This setting sets the path to the Azure storage class:: ``AZURE_ACCOUNT_NAME`` This setting is the Windows Azure Storage Account name, which in many cases is also the first part of the url for instance: http://azure_account_name.blob.core.windows.net/ would mean:: - + AZURE_ACCOUNT_NAME = "azure_account_name" ``AZURE_ACCOUNT_KEY`` @@ -26,7 +36,5 @@ This is the private key that gives your Django app access to your Windows Azure ``AZURE_CONTAINER`` -This is where the files uploaded through your Django app will be uploaded. +This is where the files uploaded through your Django app will be uploaded. The container must be already created as the storage system will not attempt to create it. - - From c366cbfbc8f4c4b9e0aba61a45adae7088b44b48 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sun, 30 Oct 2016 10:27:04 -0700 Subject: [PATCH 039/174] Use assertEqual in place of deprecated assertEquals (#223) Fixes warnings of the form "DeprecationWarning: Please use assertEqual instead." when running tests with warnings enabled. --- tests/test_s3boto.py | 16 ++++++++-------- tests/test_s3boto3.py | 22 +++++++++++----------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index e19ed92b9..93e678d1d 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -31,20 +31,20 @@ def setUp(self, S3Connection): class SafeJoinTest(TestCase): def test_normal(self): path = s3boto.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") - self.assertEquals(path, "path/to/somewhere/other/path/to/somewhere") + self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") def test_with_dot(self): path = s3boto.safe_join("", "path/./somewhere/../other", "..", ".", "to/./somewhere") - self.assertEquals(path, "path/to/somewhere") + self.assertEqual(path, "path/to/somewhere") def test_base_url(self): path = s3boto.safe_join("base_url", "path/to/somewhere") - self.assertEquals(path, "base_url/path/to/somewhere") + self.assertEqual(path, "base_url/path/to/somewhere") def test_base_url_with_slash(self): path = s3boto.safe_join("base_url/", "path/to/somewhere") - self.assertEquals(path, "base_url/path/to/somewhere") + self.assertEqual(path, "base_url/path/to/somewhere") def test_suspicious_operation(self): self.assertRaises(ValueError, @@ -55,14 +55,14 @@ def test_trailing_slash(self): Test safe_join with paths that end with a trailing slash. """ path = s3boto.safe_join("base_url/", "path/to/somewhere/") - self.assertEquals(path, "base_url/path/to/somewhere/") + self.assertEqual(path, "base_url/path/to/somewhere/") def test_trailing_slash_multi(self): """ Test safe_join with multiple paths that end with a trailing slash. 
""" path = s3boto.safe_join("base_url/", "path/to/" "somewhere/") - self.assertEquals(path, "base_url/path/to/somewhere/") + self.assertEqual(path, "base_url/path/to/somewhere/") class S3BotoStorageTests(S3BotoTestCase): @@ -295,7 +295,7 @@ def test_storage_url(self): response_headers=None, ) - self.assertEquals(self.storage.url(name), url) + self.assertEqual(self.storage.url(name), url) self.storage.connection.generate_url.assert_called_with( self.storage.querystring_expire, **kwargs @@ -303,7 +303,7 @@ def test_storage_url(self): custom_expire = 123 - self.assertEquals(self.storage.url(name, expire=custom_expire), url) + self.assertEqual(self.storage.url(name, expire=custom_expire), url) self.storage.connection.generate_url.assert_called_with( custom_expire, **kwargs diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index ddedcf847..5218cc906 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -30,20 +30,20 @@ def setUp(self, resource): class SafeJoinTest(TestCase): def test_normal(self): path = s3boto3.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") - self.assertEquals(path, "path/to/somewhere/other/path/to/somewhere") + self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") def test_with_dot(self): path = s3boto3.safe_join("", "path/./somewhere/../other", "..", ".", "to/./somewhere") - self.assertEquals(path, "path/to/somewhere") + self.assertEqual(path, "path/to/somewhere") def test_base_url(self): path = s3boto3.safe_join("base_url", "path/to/somewhere") - self.assertEquals(path, "base_url/path/to/somewhere") + self.assertEqual(path, "base_url/path/to/somewhere") def test_base_url_with_slash(self): path = s3boto3.safe_join("base_url/", "path/to/somewhere") - self.assertEquals(path, "base_url/path/to/somewhere") + self.assertEqual(path, "base_url/path/to/somewhere") def test_suspicious_operation(self): self.assertRaises(ValueError, @@ -54,14 +54,14 @@ def test_trailing_slash(self): Test safe_join with paths that end with a trailing slash. """ path = s3boto3.safe_join("base_url/", "path/to/somewhere/") - self.assertEquals(path, "base_url/path/to/somewhere/") + self.assertEqual(path, "base_url/path/to/somewhere/") def test_trailing_slash_multi(self): """ Test safe_join with multiple paths that end with a trailing slash. 
""" path = s3boto3.safe_join("base_url/", "path/to/" "somewhere/") - self.assertEquals(path, "base_url/path/to/somewhere/") + self.assertEqual(path, "base_url/path/to/somewhere/") class S3Boto3StorageTests(S3Boto3TestCase): @@ -146,7 +146,7 @@ def test_storage_save_gzip(self): args, kwargs = obj.upload_fileobj.call_args content = args[0] zfile = gzip.GzipFile(mode='rb', fileobj=content) - self.assertEquals(zfile.read(), b"I should be gzip'd") + self.assertEqual(zfile.read(), b"I should be gzip'd") def test_compress_content_len(self): """ @@ -293,7 +293,7 @@ def test_storage_url(self): url = 'http://aws.amazon.com/%s' % name self.storage.bucket.meta.client.generate_presigned_url.return_value = url self.storage.bucket.name = 'bucket' - self.assertEquals(self.storage.url(name), url) + self.assertEqual(self.storage.url(name), url) self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( 'get_object', Params={'Bucket': self.storage.bucket.name, 'Key': name}, @@ -302,7 +302,7 @@ def test_storage_url(self): custom_expire = 123 - self.assertEquals(self.storage.url(name, expire=custom_expire), url) + self.assertEqual(self.storage.url(name, expire=custom_expire), url) self.storage.bucket.meta.client.generate_presigned_url.assert_called_with( 'get_object', Params={'Bucket': self.storage.bucket.name, 'Key': name}, @@ -320,7 +320,7 @@ def test_generated_url_is_encoded(self): def test_strip_signing_parameters(self): expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' - self.assertEquals(self.storage._strip_signing_parameters( + self.assertEqual(self.storage._strip_signing_parameters( '%s?X-Amz-Date=12345678&X-Amz-Signature=Signature' % expected), expected) - self.assertEquals(self.storage._strip_signing_parameters( + self.assertEqual(self.storage._strip_signing_parameters( '%s?expires=12345678&signature=Signature' % expected), expected) From cc78c566225dd7c2406a47c5a02679fec98e3ede Mon Sep 17 00:00:00 2001 From: Adam Dobrawy Date: Wed, 11 Jan 2017 21:57:28 +0100 Subject: [PATCH 040/174] Fix syntax of docs (#242) --- docs/backends/amazon-S3.rst | 61 ++++++++++++---------------- docs/backends/azure.rst | 15 ++++--- docs/backends/database.rst | 2 +- docs/backends/dropbox.rst | 8 +--- docs/backends/ftp.rst | 8 +--- docs/backends/mogilefs.rst | 22 ++++++++--- docs/backends/sftp.rst | 79 ++++++++++++++++--------------------- 7 files changed, 89 insertions(+), 106 deletions(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index cbc372b2d..17118b078 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -14,63 +14,52 @@ To use s3boto set:: DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' -``AWS_ACCESS_KEY_ID`` +To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py:: -Your Amazon Web Services access key, as a string. + STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage' -``AWS_SECRET_ACCESS_KEY`` +Available are numerous settings. It should be especially noted the following: -Your Amazon Web Services secret access key, as a string. +``AWS_ACCESS_KEY_ID`` + Your Amazon Web Services access key, as a string. -``AWS_STORAGE_BUCKET_NAME`` +``AWS_SECRET_ACCESS_KEY`` + Your Amazon Web Services secret access key, as a string. -Your Amazon Web Services storage bucket name, as a string. +``AWS_STORAGE_BUCKET_NAME`` + Your Amazon Web Services storage bucket name, as a string. 
``AWS_DEFAULT_ACL`` (optional) - -If set to ``private`` changes uploaded file's Access Control List from the default permission ``public-read`` to give owner full control and remove read access from everyone else. + If set to ``private`` changes uploaded file's Access Control List from the default permission ``public-read`` to give owner full control and remove read access from everyone else. ``AWS_AUTO_CREATE_BUCKET`` (optional) - -If set to ``True`` the bucket specified in ``AWS_STORAGE_BUCKET_NAME`` is automatically created. - + If set to ``True`` the bucket specified in ``AWS_STORAGE_BUCKET_NAME`` is automatically created. ``AWS_HEADERS`` (optional) + If you'd like to set headers sent with each file of the storage:: -If you'd like to set headers sent with each file of the storage:: - - # see http://developer.yahoo.com/performance/rules.html#expires - AWS_HEADERS = { - 'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT', - 'Cache-Control': 'max-age=86400', - } + # see http://developer.yahoo.com/performance/rules.html#expires + AWS_HEADERS = { + 'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT', + 'Cache-Control': 'max-age=86400', + } ``AWS_QUERYSTRING_AUTH`` (optional; default is ``True``) - -Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` removes `query parameter -authentication`_ from generated URLs. This can be useful if your S3 buckets are -public. + Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` removes `query parameter + authentication`_ from generated URLs. This can be useful if your S3 buckets are + public. ``AWS_QUERYSTRING_EXPIRE`` (optional; default is 3600 seconds) - -The number of seconds that a generated URL with `query parameter -authentication`_ is valid for. - - -To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py:: - - STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage' - - -.. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html + The number of seconds that a generated URL with `query parameter + authentication`_ is valid for. ``AWS_S3_ENCRYPTION`` (optional; default is ``False``) - -Enable server-side file encryption while at rest, by setting ``encrypt_key`` parameter to True. More info available here: http://boto.cloudhackers.com/en/latest/ref/s3.html + Enable server-side file encryption while at rest, by setting ``encrypt_key`` parameter to True. More info available here: http://boto.cloudhackers.com/en/latest/ref/s3.html ``AWS_S3_FILE_OVERWRITE`` (optional: default is ``True``) + By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. -By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. +.. 
_query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html CloudFront ~~~~~~~~~~ diff --git a/docs/backends/azure.rst b/docs/backends/azure.rst index 54378db90..da4fa765a 100644 --- a/docs/backends/azure.rst +++ b/docs/backends/azure.rst @@ -17,24 +17,23 @@ Add to your requirements file:: Settings ******* -``DEFAULT_FILE_STORAGE`` - -This setting sets the path to the Azure storage class:: +To use `AzureStorage` set:: DEFAULT_FILE_STORAGE = 'storages.backends.azure_storage.AzureStorage' +The following settings are available: ``AZURE_ACCOUNT_NAME`` -This setting is the Windows Azure Storage Account name, which in many cases is also the first part of the url for instance: http://azure_account_name.blob.core.windows.net/ would mean:: + This setting is the Windows Azure Storage Account name, which in many cases is also the first part of the url for instance: http://azure_account_name.blob.core.windows.net/ would mean:: - AZURE_ACCOUNT_NAME = "azure_account_name" + AZURE_ACCOUNT_NAME = "azure_account_name" ``AZURE_ACCOUNT_KEY`` -This is the private key that gives your Django app access to your Windows Azure Account. + This is the private key that gives your Django app access to your Windows Azure Account. ``AZURE_CONTAINER`` -This is where the files uploaded through your Django app will be uploaded. -The container must be already created as the storage system will not attempt to create it. + This is where the files uploaded through your Django app will be uploaded. + The container must be already created as the storage system will not attempt to create it. diff --git a/docs/backends/database.rst b/docs/backends/database.rst index bc86e1c31..e2f40413a 100644 --- a/docs/backends/database.rst +++ b/docs/backends/database.rst @@ -56,4 +56,4 @@ Note: It returns special path, which should be mapped to special view, which ret response['Content-Disposition'] = 'inline; filename=%s'%filename return response -Note: If filename exist, blob will be overwritten, to change this remove get_available_name(self, name), so Storage.get_available_name(self, name) will be used to generate new filename. +.. note:: If filename exist, blob will be overwritten, to change this remove get_available_name(self, name), so Storage.get_available_name(self, name) will be used to generate new filename. diff --git a/docs/backends/dropbox.rst b/docs/backends/dropbox.rst index e884f10cf..5fa925e6c 100644 --- a/docs/backends/dropbox.rst +++ b/docs/backends/dropbox.rst @@ -4,14 +4,10 @@ DropBox Settings -------- - ``DROPBOX_OAUTH2_TOKEN`` - -Your DropBox token, if you haven't follow this `guide step`_. + Your DropBox token, if you haven't follow this `guide step`_. ``DROPBOX_ROOT_PATH`` - -Allow to jail your storage to a defined directory. - + Allow to jail your storage to a defined directory. .. _`guide step`: https://www.dropbox.com/developers/documentation/python#tutorial diff --git a/docs/backends/ftp.rst b/docs/backends/ftp.rst index c8d8e6a8b..64320e54c 100644 --- a/docs/backends/ftp.rst +++ b/docs/backends/ftp.rst @@ -9,11 +9,7 @@ Settings -------- ``LOCATION`` - -URL of the server that hold the files. -Example ``'ftp://:@:'`` + URL of the server that hold the files. Example ``'ftp://:@:'`` ``BASE_URL`` - -URL that serves the files stored at this location. Defaults to the value of -your ``MEDIA_URL`` setting. + URL that serves the files stored at this location. Defaults to the value of your ``MEDIA_URL`` setting. 
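The overwrite behavior that the database note above alludes to hinges on ``get_available_name``; a minimal sketch, assuming a local-disk storage and an invented class name, of how a storage opts into overwriting instead of renaming::

    from django.core.files.storage import FileSystemStorage

    class OverwritingStorage(FileSystemStorage):
        def get_available_name(self, name, max_length=None):
            # The stock implementation returns a new, unused name when
            # `name` is already taken; deleting the existing file and
            # returning the name unchanged makes saves overwrite in place.
            if self.exists(name):
                self.delete(name)
            return name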
diff --git a/docs/backends/mogilefs.rst b/docs/backends/mogilefs.rst index a330fdda0..7d868fec4 100644 --- a/docs/backends/mogilefs.rst +++ b/docs/backends/mogilefs.rst @@ -5,11 +5,23 @@ This storage allows you to use MogileFS, it comes from this blog post. The MogileFS storage backend is fairly simple: it uses URLs (or, rather, parts of URLs) as keys into the mogile database. When the user requests a file stored by mogile (say, an avatar), the URL gets passed to a view which, using a client to the mogile tracker, retrieves the "correct" path (the path that points to the actual file data). The view will then either return the path(s) to perlbal to reproxy, or, if you're not using perlbal to reproxy (which you should), it serves the data of the file directly from django. -* ``MOGILEFS_DOMAIN``: The mogile domain that files should read from/written to, e.g "production" -* ``MOGILEFS_TRACKERS``: A list of trackers to connect to, e.g. ["foo.sample.com:7001", "bar.sample.com:7001"] -* ``MOGILEFS_MEDIA_URL`` (optional): The prefix for URLs that point to mogile files. This is used in a similar way to ``MEDIA_URL``, e.g. "/mogilefs/" -* ``SERVE_WITH_PERLBAL``: Boolean that, when True, will pass the paths back in the response in the ``X-REPROXY-URL`` header. If False, django will serve all mogile media files itself (bad idea for production, but useful if you're testing on a setup that doesn't have perlbal running) -* ``DEFAULT_FILE_STORAGE``: This is the class that's used for the backend. You'll want to set this to ``project.app.storages.MogileFSStorage`` (or wherever you've installed the backend) +To use `MogileFSStorage` set:: + + DEFAULT_FILE_STORAGE = 'storages.backends.mogile.MogileFSStorage' + +The following settings are available: + +``MOGILEFS_DOMAIN`` + The mogile domain that files should read from/written to, e.g "production" + +``MOGILEFS_TRACKERS`` + A list of trackers to connect to, e.g. ["foo.sample.com:7001", "bar.sample.com:7001"] + +``MOGILEFS_MEDIA_URL`` (optional) + The prefix for URLs that point to mogile files. This is used in a similar way to ``MEDIA_URL``, e.g. "/mogilefs/" + +``SERVE_WITH_PERLBAL`` + Boolean that, when True, will pass the paths back in the response in the ``X-REPROXY-URL`` header. If False, django will serve all mogile media files itself (bad idea for production, but useful if you're testing on a setup that doesn't have perlbal running) Getting files into mogile ************************* diff --git a/docs/backends/sftp.rst b/docs/backends/sftp.rst index a754097d8..9d18e074d 100644 --- a/docs/backends/sftp.rst +++ b/docs/backends/sftp.rst @@ -5,64 +5,55 @@ Settings -------- ``SFTP_STORAGE_HOST`` - -The hostname where you want the files to be saved. + The hostname where you want the files to be saved. ``SFTP_STORAGE_ROOT`` + The root directory on the remote host into which files should be placed. + Should work the same way that ``STATIC_ROOT`` works for local files. Must + include a trailing slash. -The root directory on the remote host into which files should be placed. -Should work the same way that ``STATIC_ROOT`` works for local files. Must -include a trailing slash. - -``SFTP_STORAGE_PARAMS`` (Optional) - -A dictionary containing connection parameters to be passed as keyword -arguments to ``paramiko.SSHClient().connect()`` (do not include hostname here). 
-See `paramiko SSHClient.connect() documentation`_ for details +``SFTP_STORAGE_PARAMS`` (optional) + A dictionary containing connection parameters to be passed as keyword + arguments to ``paramiko.SSHClient().connect()`` (do not include hostname here). + See `paramiko SSHClient.connect() documentation`_ for details -.. _`paramiko SSHClient.connect() documentation`: http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect +``SFTP_STORAGE_INTERACTIVE`` (optional) + A boolean indicating whether to prompt for a password if the connection cannot + be made using keys, and there is not already a password in + ``SFTP_STORAGE_PARAMS``. You can set this to ``True`` to enable interactive + login when running ``manage.py collectstatic``, for example. -``SFTP_STORAGE_INTERACTIVE`` (Optional) + .. warning:: -A boolean indicating whether to prompt for a password if the connection cannot -be made using keys, and there is not already a password in -``SFTP_STORAGE_PARAMS``. You can set this to ``True`` to enable interactive -login when running ``manage.py collectstatic``, for example. + DO NOT set SFTP_STORAGE_INTERACTIVE to True if you are using this storage + for files being uploaded to your site by users, because you'll have no way + to enter the password when they submit the form.. -.. warning:: +``SFTP_STORAGE_FILE_MODE`` (optional) + A bitmask for setting permissions on newly-created files. See + `Python os.chmod documentation`_ for acceptable values. - DO NOT set SFTP_STORAGE_INTERACTIVE to True if you are using this storage - for files being uploaded to your site by users, because you'll have no way - to enter the password when they submit the form.. +``SFTP_STORAGE_DIR_MODE`` (optional) + A bitmask for setting permissions on newly-created directories. See + `Python os.chmod documentation`_ for acceptable values. -``SFTP_STORAGE_FILE_MODE`` (Optional) + .. note:: -A bitmask for setting permissions on newly-created files. See -`Python os.chmod documentation`_ for acceptable values. + Hint: if you start the mode number with a 0 you can express it in octal + just like you would when doing "chmod 775 myfile" from bash. +``SFTP_STORAGE_UID`` (optional) + UID of the account that should be set as owner of the files on the remote + host. You may have to be root to set this. -``SFTP_STORAGE_DIR_MODE`` (Optional) +``SFTP_STORAGE_GID`` (optional) + GID of the group that should be set on the files on the remote host. You have + to be a member of the group to set this. -A bitmask for setting permissions on newly-created directories. See -`Python os.chmod documentation`_ for acceptable values. +``SFTP_KNOWN_HOST_FILE`` (optional) + Absolute path of know host file, if it isn't set ``"~/.ssh/known_hosts"`` will be used. -.. note:: - Hint: if you start the mode number with a 0 you can express it in octal - just like you would when doing "chmod 775 myfile" from bash. +.. _`paramiko SSHClient.connect() documentation`: http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect .. _`Python os.chmod documentation`: http://docs.python.org/library/os.html#os.chmod - -``SFTP_STORAGE_UID`` (Optional) - -UID of the account that should be set as owner of the files on the remote -host. You may have to be root to set this. - -``SFTP_STORAGE_GID`` (Optional) - -GID of the group that should be set on the files on the remote host. You have -to be a member of the group to set this. 
- -``SFTP_KNOWN_HOST_FILE`` (Optional) - -Absolute path of know host file, if it isn't set ``"~/.ssh/known_hosts"`` will be used. From ed45c806e019faf96583148e5c93512eb1938b58 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 11 Jan 2017 13:02:21 -0800 Subject: [PATCH 041/174] Fix S3Boto3Storage to avoid race conditions in a multi-threaded WSGI environment (#238) Use a different boto3 session for each instance of the S3 connection. --- CHANGELOG.rst | 1 + storages/backends/s3boto3.py | 6 +++--- tests/test_s3boto3.py | 3 +-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 369b1a98b..9fdf5c33e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,7 @@ django-storages change log ****************** * Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_ thanks @jbittel) +* Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment .. _#204: https://github.com/jschneier/django-storages/pull/204 diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 722a3a872..b08382c05 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -14,7 +14,7 @@ from django.utils.timezone import localtime try: - from boto3 import resource + import boto3.session from boto3 import __version__ as boto3_version from botocore.client import Config from botocore.exceptions import ClientError @@ -199,7 +199,6 @@ class S3Boto3Storage(Storage): mode and supports streaming(buffering) data in chunks to S3 when writing. """ - connection_class = staticmethod(resource) connection_service_name = 's3' default_content_type = 'application/octet-stream' connection_response_error = ClientError @@ -285,7 +284,8 @@ def connection(self): # urllib/requests libraries read. See https://github.com/boto/boto3/issues/338 # and http://docs.python-requests.org/en/latest/user/advanced/#proxies if self._connection is None: - self._connection = self.connection_class( + session = boto3.session.Session() + self._connection = session.resource( self.connection_service_name, aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key, diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 5218cc906..29c43bd5d 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -21,8 +21,7 @@ class S3Boto3TestCase(TestCase): - @mock.patch('storages.backends.s3boto3.resource') - def setUp(self, resource): + def setUp(self): self.storage = s3boto3.S3Boto3Storage() self.storage._connection = mock.MagicMock() From 5d6b85e74906ce379a70b9d06edc7ba59e1a185d Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 11 Jan 2017 13:02:51 -0800 Subject: [PATCH 042/174] Allow newer versions of pytest-cov for tests (#239) Newer version has removed deprecated code. Fixes deprecation warning when running tests with warnings enabled like: .../django-storages/lib64/python3.5/site-packages/pytest_cov/plugin.py:37: DeprecationWarning: type argument to addoption() is a string 'int'. For parsearg this should be a type. 
(options: ('--cov-fail-under',)) help='Fail if the total coverage is less than MIN.') --- requirements-tests.txt | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index e9bf66d71..d87d26210 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,5 +1,5 @@ Django>=1.7 -pytest-cov==2.2.1 +pytest-cov>=2.2.1 boto>=2.32.0 boto3>=1.2.3 dropbox>=3.24 diff --git a/tox.ini b/tox.ini index 9c1a1e70f..d181cd7e5 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ deps = django110: Django>=1.10, <1.11 py27: mock==1.0.1 boto>=2.32.0 - pytest-cov==2.2.1 + pytest-cov>=2.2.1 boto3>=1.2.3 dropbox>=3.24 paramiko From b749c0932221068441e330c382cd6e6f505d5d94 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 11 Jan 2017 13:03:13 -0800 Subject: [PATCH 043/174] Remove unnecessary '-' parsing from boto3 version (#237) boto3 version has never contained a '-'. Likely copied over from boto which did use '-' at one time. --- storages/backends/s3boto3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index b08382c05..db7c0f624 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -24,7 +24,7 @@ from storages.utils import setting -boto3_version_info = tuple([int(i) for i in boto3_version.split('-')[0].split('.')]) +boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) if boto3_version_info[:2] < (1, 2): raise ImproperlyConfigured("The installed Boto3 library must be 1.2.0 or " From 07abcc9a4a80724f9254248872c4aa91f832066e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?piglei=E2=84=A2?= Date: Fri, 13 Jan 2017 00:28:07 +0800 Subject: [PATCH 044/174] Fix S3Boto3: Double localtime call Exception on modified_time when USE_TZ=False #234 (#235) --- AUTHORS | 1 + storages/backends/s3boto3.py | 9 +++++++-- tests/test_s3boto3.py | 15 +++++++++++++-- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/AUTHORS b/AUTHORS index 0991c0ee7..530f21745 100644 --- a/AUTHORS +++ b/AUTHORS @@ -25,6 +25,7 @@ By order of apparition, thanks: * Anthony Monthe (Dropbox) * EunPyo (Andrew) Hong (Azure) * Michael Barrientos (S3 with Boto3) + * piglei (patches) Extra thanks to Marty for adding this in Django, you can buy his very interesting book (Pro Django). diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index db7c0f624..fd9cf0b72 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -11,7 +11,7 @@ from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes from django.utils.six.moves.urllib import parse as urlparse from django.utils.six import BytesIO -from django.utils.timezone import localtime +from django.utils.timezone import localtime, is_naive try: import boto3.session @@ -534,7 +534,12 @@ def get_modified_time(self, name): def modified_time(self, name): """Returns a naive datetime object containing the last modified time.""" - return localtime(self.get_modified_time(name)).replace(tzinfo=None) + # If get_modified_time already returns a naive DateTime object, which happens + # when USE_TZ=False, return it directly instead of transfer it. + mtime = self.get_modified_time(name) + if is_naive(mtime): + return mtime + return localtime(mtime).replace(tzinfo=None) def _strip_signing_parameters(self, url): # Boto3 does not currently support generating URLs that are unsigned. 
Instead we diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 29c43bd5d..09e823b4a 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -6,6 +6,7 @@ import mock from django.test import TestCase +from django.conf import settings from django.core.files.base import ContentFile from django.utils.six.moves.urllib import parse as urlparse from django.utils.timezone import is_aware, utc @@ -273,6 +274,12 @@ def test_storage_size(self): self.assertEqual(self.storage.size(name), obj.content_length) def test_storage_mtime(self): + # Test both USE_TZ cases + for use_tz in (True, False): + with self.settings(USE_TZ=use_tz): + self._test_storage_mtime(use_tz) + + def _test_storage_mtime(self, use_tz): obj = self.storage.bucket.Object.return_value obj.last_modified = datetime.now(utc) @@ -282,9 +289,13 @@ def test_storage_mtime(self): 'Naive datetime object expected from modified_time()' ) - self.assertTrue( + self.assertIs( + settings.USE_TZ, is_aware(self.storage.get_modified_time(name)), - 'Aware datetime object expected from get_modified_time()' + '%s datetime object expected from get_modified_time() when USE_TZ=%s' % ( + ('Naive', 'Aware')[settings.USE_TZ], + settings.USE_TZ + ) ) def test_storage_url(self): From fb2d41f39c5201ad48a9a4df929774d8308efc56 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 12 Jan 2017 13:42:54 -0300 Subject: [PATCH 045/174] Comment/code tweak --- storages/backends/s3boto3.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index fd9cf0b72..67391cef6 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -534,12 +534,10 @@ def get_modified_time(self, name): def modified_time(self, name): """Returns a naive datetime object containing the last modified time.""" - # If get_modified_time already returns a naive DateTime object, which happens - # when USE_TZ=False, return it directly instead of transfer it. + # If USE_TZ=False then get_modified_time will return a naive datetime + # so we just return that, else we have to localize and strip the tz mtime = self.get_modified_time(name) - if is_naive(mtime): - return mtime - return localtime(mtime).replace(tzinfo=None) + return mtime if is_naive(mtime) else localtime(mtime).replace(tzinfo=None) def _strip_signing_parameters(self, url): # Boto3 does not currently support generating URLs that are unsigned. Instead we From 9bd76bc1cf4fbd52e266ca8751f2a584d2f02d9d Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 12 Jan 2017 13:43:41 -0300 Subject: [PATCH 046/174] Add CHANGELOG for #235 and link for #238 --- CHANGELOG.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9fdf5c33e..21627b301 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,9 +5,14 @@ django-storages change log ****************** * Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_ thanks @jbittel) -* Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment +* Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment (`#238`_ thanks @jdufresne) +* Fix trying to localize a naive datetime when ``settings.USE_TZ`` is ``False`` in ``S3Boto3Storage.modified_time``. + (thanks to @tomchuk and @piglei for the reports and the patches, `#235`_, `#234`_) .. _#204: https://github.com/jschneier/django-storages/pull/204 +.. _#238: https://github.com/jschneier/django-storages/pull/238 +.. 
_#234: https://github.com/jschneier/django-storages/issues/234 +.. _#235: https://github.com/jschneier/django-storages/pull/235 1.5.1 (2016-09-13) ****************** From 630825b3a7afed20c5a63f2668e700622bc133eb Mon Sep 17 00:00:00 2001 From: Sam Bolgert Date: Thu, 12 Jan 2017 11:37:10 -0800 Subject: [PATCH 047/174] Fix _get_or_create_bucket to actually create bucket (#196) --- storages/backends/s3boto3.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 67391cef6..1ada4936e 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -347,6 +347,8 @@ def _get_or_create_bucket(self, name): "region than we are connecting to. Set " "the region to connect to by setting " "AWS_S3_REGION_NAME to the correct region." % name) + + elif err.response['ResponseMetadata']['HTTPStatusCode'] == 404: # Notes: When using the us-east-1 Standard endpoint, you can create # buckets in other regions. The same is not true when hitting region specific # endpoints. However, when you create the bucket not in the same region, the From 28423789c5a4e244fab50defb248cb97a1c7e166 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 12 Jan 2017 16:39:21 -0300 Subject: [PATCH 048/174] CHANGELOG for #196 --- CHANGELOG.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 21627b301..35fde0d9b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -8,11 +8,13 @@ django-storages change log * Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment (`#238`_ thanks @jdufresne) * Fix trying to localize a naive datetime when ``settings.USE_TZ`` is ``False`` in ``S3Boto3Storage.modified_time``. (thanks to @tomchuk and @piglei for the reports and the patches, `#235`_, `#234`_) +* Fix automatic bucket creation in ``S3Boto3Storage`` when ``AWS_AUTO_CREATE_BUCKET`` is ``True`` (`#196`_ thanks @linuxlewis) .. _#204: https://github.com/jschneier/django-storages/pull/204 .. _#238: https://github.com/jschneier/django-storages/pull/238 .. _#234: https://github.com/jschneier/django-storages/issues/234 .. _#235: https://github.com/jschneier/django-storages/pull/235 +.. 
_#196: https://github.com/jschneier/django-storages/pull/196 1.5.1 (2016-09-13) ****************** From 4de6387eab7be16a9efd97684ce4db4d2c990288 Mon Sep 17 00:00:00 2001 From: Tom Chapin Date: Fri, 13 Jan 2017 09:56:26 -0700 Subject: [PATCH 049/174] Adding instructions for s3boto3 usage (#222) --- docs/backends/amazon-S3.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 17118b078..70f30b618 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -13,6 +13,11 @@ Settings To use s3boto set:: DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' + +To use the boto3 version of the backend:: + + DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' + To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py:: From cdec7a29d88ac54d76b338f37e7842325d77b255 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 13 Jan 2017 14:49:34 -0300 Subject: [PATCH 050/174] Clarify which S3 backend should be used and remove some legacy documentation --- docs/backends/amazon-S3.rst | 60 ++++++++++--------------------------- 1 file changed, 15 insertions(+), 45 deletions(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 70f30b618..78e14b903 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -4,24 +4,30 @@ Amazon S3 Usage ***** -There is one backend for interacting with S3 based on the boto library. A legacy backend backed on the Amazon S3 Python library was removed in version 1.2. -Another for interacting via Boto3 was added in version 1.5 +There are two backends for interacting with Amazon's S3, one based +on boto3 and an older one based on boto3. It is highly recommended that all +new projects (at least) use the boto3 backend since it has many bug fixes +and performance improvements over boto and is the future; boto is lightly +maintained if at all. The boto based backed will continue to be maintained +for the forseeable future. + +For historical completeness an extreme legacy backend was removed +in version 1.2 Settings -------- -To use s3boto set:: - - DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' - -To use the boto3 version of the backend:: +To use boto3 set:: DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' - + +To use the boto version of the backend set:: + + DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py:: - STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage' + STATICFILES_STORAGE = 'storages.backends.s3boto.S3Boto3Storage' Available are numerous settings. It should be especially noted the following: @@ -80,42 +86,6 @@ origin manually for this to work. If you need to use multiple storages that are served via CloudFront, pass the `custom_domain` parameter to their constructors. 
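
A minimal sketch of the ``custom_domain`` advice above, assuming Django
settings are configured (the domains are hypothetical; extra keyword
arguments are forwarded to the backend's settings)::

    from storages.backends.s3boto import S3BotoStorage

    # One CloudFront distribution per storage instance.
    media_storage = S3BotoStorage(custom_domain='media.cdn.example.com')
    static_storage = S3BotoStorage(custom_domain='static.cdn.example.com')
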
-Fields ------- - -Once you're done, default_storage will be the S3 storage:: - - >>> from django.core.files.storage import default_storage - >>> print default_storage.__class__ - - -The above doesn't seem to be true for django 1.3+ instead look at:: - - >>> from django.core.files.storage import default_storage - >>> print default_storage.connection - S3Connection:s3.amazonaws.com - -This way, if you define a new FileField, it will use the S3 storage:: - - >>> from django.db import models - >>> class Resume(models.Model): - ... pdf = models.FileField(upload_to='pdfs') - ... photos = models.ImageField(upload_to='photos') - ... - >>> resume = Resume() - >>> print resume.pdf.storage - - -Tests -***** - -Initialization:: - - >>> from django.core.files.storage import default_storage - >>> from django.core.files.base import ContentFile - >>> from django.core.cache import cache - >>> from models import MyStorage - Storage ------- From a447233a3b88e46ac6fa01ad58945e5903f2dac8 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 13 Jan 2017 14:52:42 -0300 Subject: [PATCH 051/174] Document AWS_LOCATION. Fixes #224 --- docs/backends/amazon-S3.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 78e14b903..c3d08be6f 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -70,6 +70,9 @@ Available are numerous settings. It should be especially noted the following: ``AWS_S3_FILE_OVERWRITE`` (optional: default is ``True``) By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. +``AWS_LOCATION`` (optional: default is `''`) + A path prefix that will be prepended to all uploads + .. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html CloudFront From 35bf3de08708c4d0f259a0956da295ecb977d157 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 13 Jan 2017 14:53:50 -0300 Subject: [PATCH 052/174] Note the documentation improvement in CHANGELOG --- CHANGELOG.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 35fde0d9b..d94a9fc98 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -9,6 +9,7 @@ django-storages change log * Fix trying to localize a naive datetime when ``settings.USE_TZ`` is ``False`` in ``S3Boto3Storage.modified_time``. (thanks to @tomchuk and @piglei for the reports and the patches, `#235`_, `#234`_) * Fix automatic bucket creation in ``S3Boto3Storage`` when ``AWS_AUTO_CREATE_BUCKET`` is ``True`` (`#196`_ thanks @linuxlewis) +* Improve the documentation for the S3 backends (thanks to various contributors!) .. _#204: https://github.com/jschneier/django-storages/pull/204 .. 
_#238: https://github.com/jschneier/django-storages/pull/238 From 72cd5d62d5baea47884215efb4a296b586b84c7b Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 13 Jan 2017 14:54:18 -0300 Subject: [PATCH 053/174] Release version 1.5.2 --- CHANGELOG.rst | 2 +- storages/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d94a9fc98..9755a9e54 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ django-storages change log ========================== -1.5.2 (XXXX-XX-XX) +1.5.2 (2017-01-13) ****************** * Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_ thanks @jbittel) diff --git a/storages/__init__.py b/storages/__init__.py index 51ed7c486..c3b384154 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.5.1' +__version__ = '1.5.2' From 61b26e1496ba8ca7c4834e021e7e3ef9209ec741 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sat, 14 Jan 2017 15:44:42 -0800 Subject: [PATCH 054/174] Use Travis's builtin support for pip cache (#247) See docs: https://docs.travis-ci.com/user/caching/#pip-cache --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 205ed3a13..da94abbce 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,6 @@ sudo: false language: python +cache: pip python: - 3.5 From 27479d25d92034281c2bbf44b0a07c1f0f5c99d8 Mon Sep 17 00:00:00 2001 From: Mohamad Nour Chawich Date: Sun, 15 Jan 2017 00:45:02 +0100 Subject: [PATCH 055/174] Fix typos in Amazon S3 documentation (#246) --- docs/backends/amazon-S3.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index c3d08be6f..1b6856b02 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -5,7 +5,7 @@ Usage ***** There are two backends for interacting with Amazon's S3, one based -on boto3 and an older one based on boto3. It is highly recommended that all +on boto3 and an older one based on boto. It is highly recommended that all new projects (at least) use the boto3 backend since it has many bug fixes and performance improvements over boto and is the future; boto is lightly maintained if at all. The boto based backed will continue to be maintained @@ -27,7 +27,7 @@ To use the boto version of the backend set:: To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py:: - STATICFILES_STORAGE = 'storages.backends.s3boto.S3Boto3Storage' + STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' Available are numerous settings. It should be especially noted the following: From c75e3c81d9afd61b5cedf5ce0b5307f71927a009 Mon Sep 17 00:00:00 2001 From: Oon Arfiandwi Date: Mon, 23 Jan 2017 06:27:50 +0700 Subject: [PATCH 056/174] add apache-libcloud installation note I try to configure django-storages to use google cloud storage refer to this file, but I think it would be better to add apache-libcloud installation on this guideline, so noob people like me won't need to have the exception first because know that must install apache-libcloud first. 
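
Beyond installing the package, a minimal sketch of pointing the libcloud
backend at Google Cloud Storage (the provider name, credentials and bucket
below are placeholders)::

    # settings.py
    LIBCLOUD_PROVIDERS = {
        'google': {
            'type': 'libcloud.storage.types.Provider.GOOGLE_STORAGE',
            'user': '<interoperability access key>',
            'key': '<interoperability secret>',
            'bucket': 'placeholder-bucket',
        },
    }
    DEFAULT_LIBCLOUD_PROVIDER = 'google'
    DEFAULT_FILE_STORAGE = 'storages.backends.apache_libcloud.LibCloudStorage'
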
--- docs/backends/apache_libcloud.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/backends/apache_libcloud.rst b/docs/backends/apache_libcloud.rst index 0a1dc1d2c..7ea5ef0bd 100644 --- a/docs/backends/apache_libcloud.rst +++ b/docs/backends/apache_libcloud.rst @@ -6,6 +6,10 @@ It aims to provide a consistent API for dealing with cloud storage (and, more broadly, the many other services provided by cloud providers, such as device provisioning, load balancer configuration, and DNS configuration). +Use pip to install apache-libcloud from PyPI:: + + pip install apache-libcloud + As of v0.10.1, Libcloud supports the following cloud storage providers: * `Amazon S3`_ * `Google Cloud Storage`_ From b6348d2a753f08527fd6074f0822bdc3a59ba902 Mon Sep 17 00:00:00 2001 From: Andrey Kostakov Date: Mon, 23 Jan 2017 23:39:47 +0300 Subject: [PATCH 057/174] Change badge image url to shields.io (#253) Because pypip.in is down. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 39b5fc582..b1f07923d 100644 --- a/README.rst +++ b/README.rst @@ -6,7 +6,7 @@ django-storages :target: https://travis-ci.org/jschneier/django-storages :alt: Build Status -.. image:: https://pypip.in/v/django-storages/badge.png +.. image:: https://img.shields.io/pypi/v/django-storages.png :target: https://pypi.python.org/pypi/django-storages :alt: PyPI Version From 3e5a0c76db455256470171341af4d15e3d5e98a2 Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Tue, 31 Jan 2017 11:07:57 +0200 Subject: [PATCH 058/174] Update documentation copyright years --- docs/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c1052fa73..85cfc7942 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -42,7 +42,7 @@ # General information about the project. project = u'django-storages' -copyright = u'2011-2013, David Larlet, et. al.' +copyright = u'2011-2017, David Larlet, et. al.' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -223,7 +223,7 @@ epub_title = u'django-storages' epub_author = u'David Larlet, et. al.' epub_publisher = u'David Larlet, et. al.' -epub_copyright = u'2011-2013, David Larlet, et. al.' +epub_copyright = u'2011-2017, David Larlet, et. al.' # The language of the text. It defaults to the language option # or en if the language is not set. From 7bb7e2b4d33b87e8706d38a91e2296f1cf3c89f1 Mon Sep 17 00:00:00 2001 From: Matt Braymer-Hayes Date: Tue, 28 Feb 2017 10:16:24 -0800 Subject: [PATCH 059/174] #257: S3Boto3Storage bucket_params (#258) * #257 Use bucket_params when creating bucket * Add self to AUTHORS --- AUTHORS | 1 + storages/backends/s3boto3.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 530f21745..0dc804966 100644 --- a/AUTHORS +++ b/AUTHORS @@ -26,6 +26,7 @@ By order of apparition, thanks: * EunPyo (Andrew) Hong (Azure) * Michael Barrientos (S3 with Boto3) * piglei (patches) + * Matt Braymer-Hayes (S3 with Boto3) Extra thanks to Marty for adding this in Django, you can buy his very interesting book (Pro Django). 
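
The one-line fix in the diff below matters because ``bucket_params`` is what
carries the location constraint. A sketch of the raw boto3 call it produces
(the region and bucket name are examples, and credentials are assumed to be
configured)::

    import boto3

    s3 = boto3.resource('s3')
    bucket = s3.Bucket('example-bucket')
    # Without CreateBucketConfiguration the bucket is created in the
    # default region, regardless of AWS_S3_REGION_NAME.
    bucket.create(
        ACL='public-read',
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'},
    )
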
diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 1ada4936e..a17885fd9 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -365,7 +365,7 @@ def _get_or_create_bucket(self, name): if region_name != 'us-east-1': bucket_params['CreateBucketConfiguration'] = { 'LocationConstraint': region_name} - bucket.create(ACL=self.bucket_acl) + bucket.create(**bucket_params) else: raise ImproperlyConfigured("Bucket %s does not exist. Buckets " "can be automatically created by " From 06e9224f086b745d140e805a545ef9ac42810fec Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 28 Feb 2017 15:48:44 -0300 Subject: [PATCH 060/174] Add a test that creating a bucket passes in all params: ref #257 --- tests/test_s3boto3.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 09e823b4a..0f5a6a9b7 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -193,14 +193,22 @@ def test_storage_open_write(self): multipart.complete.assert_called_once_with( MultipartUpload={'Parts': [{'ETag': '123', 'PartNumber': 1}]}) - # def test_storage_exists_bucket(self): - # bucket = self.storage._connection.Bucket.return_value - # bucket.meta.client.head_bucket.side_effect = ClientError( - # {'Error': {'Code': 123, 'Message': 'Fake'}}, 'load') - # self.assertFalse(self.storage.exists('')) - # - # self.storage.bucket.meta.client.head_bucket.side_effect = None - # self.assertTrue(self.storage.exists('')) + def test_auto_creating_bucket(self): + self.storage.auto_create_bucket = True + Bucket = mock.MagicMock() + self.storage._connection.Bucket.return_value = Bucket + self.storage._connection.meta.client.meta.region_name = 'sa-east-1' + + Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, + 'ResponseMetadata': {'HTTPStatusCode': 404}}, + 'head_bucket') + self.storage._get_or_create_bucket('testbucketname') + Bucket.create.assert_called_once_with( + ACL='public-read', + CreateBucketConfiguration={ + 'LocationConstraint': 'sa-east-1', + } + ) def test_storage_exists(self): obj = self.storage.bucket.Object.return_value From 3e7fff0b2690fa3daeaa8da05c764cbff3dba924 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 28 Feb 2017 15:58:53 -0300 Subject: [PATCH 061/174] Add #257 to CHANGELOG --- CHANGELOG.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9755a9e54..0fcb22220 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,14 @@ django-storages change log ========================== +1.5.3 (XXXX-XX-XX) +****************** + +* Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) + +.. _#257: https://github.com/jschneier/django-storages/issues/257 +.. _#258: https://github.com/jschneier/django-storages/pull/258 + 1.5.2 (2017-01-13) ****************** From 8b506f3c1f4a02ccbce41fcb04f226e7b58a517a Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 28 Feb 2017 16:56:20 -0300 Subject: [PATCH 062/174] Document AWS_S3_OBJECT_PARAMETERS and mention AWS_HEADERS is boto backend only --- docs/backends/amazon-S3.rst | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 1b6856b02..1edfcab86 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -46,7 +46,7 @@ Available are numerous settings. 
It should be especially noted the following: ``AWS_AUTO_CREATE_BUCKET`` (optional) If set to ``True`` the bucket specified in ``AWS_STORAGE_BUCKET_NAME`` is automatically created. -``AWS_HEADERS`` (optional) +``AWS_HEADERS`` (optional - boto only, for boto3 see ``AWS_S3_OBJECT_PARAMETERS``) If you'd like to set headers sent with each file of the storage:: # see http://developer.yahoo.com/performance/rules.html#expires @@ -55,6 +55,13 @@ Available are numerous settings. It should be especially noted the following: 'Cache-Control': 'max-age=86400', } +``AWS_S3_OBJECT_PARAMETERS`` (optional - boto3 only) + Use this to set arbitrary parameters on your object (such as Cache-Control):: + + AWS_S3_OBJECT_PARAMETERS = { + 'Cache-Control': 'max-age=86400', + } + ``AWS_QUERYSTRING_AUTH`` (optional; default is ``True``) Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` removes `query parameter authentication`_ from generated URLs. This can be useful if your S3 buckets are From bce67edd847f626014974a2d2516268dc27d1e99 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 1 Mar 2017 21:48:02 -0300 Subject: [PATCH 063/174] Use the svg badges in the readme --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index b1f07923d..660f592d1 100644 --- a/README.rst +++ b/README.rst @@ -2,14 +2,14 @@ django-storages =============== -.. image:: https://travis-ci.org/jschneier/django-storages.png?branch=master - :target: https://travis-ci.org/jschneier/django-storages - :alt: Build Status -.. image:: https://img.shields.io/pypi/v/django-storages.png +.. image:: https://img.shields.io/pypi/v/django-storages.svg :target: https://pypi.python.org/pypi/django-storages :alt: PyPI Version +.. image:: https://travis-ci.org/jschneier/django-storages.svg?branch=master + :target: https://travis-ci.org/jschneier/django-storages + :alt: Build Status Installation ============ From e2edbb03b9b6ad5f2c0eaa32634447c314fcb2fd Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 3 Mar 2017 14:54:35 -0300 Subject: [PATCH 064/174] Fix rst syntax issue --- docs/backends/azure.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/backends/azure.rst b/docs/backends/azure.rst index da4fa765a..b9fa2734b 100644 --- a/docs/backends/azure.rst +++ b/docs/backends/azure.rst @@ -1,5 +1,5 @@ Azure Storage -=========== +============= A custom storage system for Django using Windows Azure Storage backend. @@ -15,7 +15,7 @@ Add to your requirements file:: Settings -******* +******** To use `AzureStorage` set:: From c2e7dacc01035765984b487e29695527716b6c4c Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 3 Mar 2017 14:55:36 -0300 Subject: [PATCH 065/174] Document AWS_IS_GZIPPED and GZIP_CONTENT_TYPES Closes #205 --- docs/backends/amazon-S3.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 1edfcab86..ba5cfe583 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -80,6 +80,12 @@ Available are numerous settings. 
It should be especially noted the following: ``AWS_LOCATION`` (optional: default is `''`) A path prefix that will be prepended to all uploads +``AWS_IS_GZIPPED`` (optional: default is ``False``) + Whether or not to enable gzipping of content types specified by ``GZIP_CONTENT_TYPES`` + +``GZIP_CONTENT_TYPES`` (optional: default is ``text/css``, ``text/javascript``, ``application/javascript``, ``application/x-javascript``, ``image/svg+xml``) + When ``AWS_IS_GZIPPED`` is set to ``True`` the content types which will be gzipped + .. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html CloudFront From 558e872ec04d5c9c1508001998259f24af71eec5 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 3 Mar 2017 17:13:14 -0300 Subject: [PATCH 066/174] Use the univeral flag with bdist_wheel --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 7c964b49e..3c6e79cf3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,2 @@ -[wheel] +[bdist_wheel] universal=1 From 969aa86d43351cef3174de1e1f5bfe18b1dc9734 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Mon, 13 Mar 2017 14:26:24 -0300 Subject: [PATCH 067/174] Minor code cleanup --- storages/backends/s3boto.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 958774ea3..b36ebde76 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -348,9 +348,8 @@ def _clean_name(self, name): # a workaround here. if name.endswith('/') and not clean_name.endswith('/'): # Add a trailing slash as it was stripped. - return clean_name + '/' - else: - return clean_name + clean_name += '/' + return clean_name def _normalize_name(self, name): """ From 3c3746b7ad0caf696e77f4a2c1eb4eb62694ecf6 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Mon, 13 Mar 2017 15:51:20 -0400 Subject: [PATCH 068/174] Remove a whole bunch of deprecated storage backends (#280) --- docs/backends/couchdb.rst | 5 - docs/backends/database.rst | 59 ------------ docs/backends/image.rst | 5 - docs/backends/mogilefs.rst | 67 -------------- docs/backends/overwrite.rst | 5 - docs/backends/symlinkcopy.rst | 6 -- setup.py | 2 +- storages/backends/couchdb.py | 144 ----------------------------- storages/backends/database.py | 141 ---------------------------- storages/backends/hashpath.py | 53 ----------- storages/backends/image.py | 64 ------------- storages/backends/mogile.py | 124 ------------------------- storages/backends/overwrite.py | 29 ------ storages/backends/symlinkorcopy.py | 71 -------------- tests/test_hashpath.py | 35 ------- 15 files changed, 1 insertion(+), 809 deletions(-) delete mode 100644 docs/backends/couchdb.rst delete mode 100644 docs/backends/database.rst delete mode 100644 docs/backends/image.rst delete mode 100644 docs/backends/mogilefs.rst delete mode 100644 docs/backends/overwrite.rst delete mode 100644 docs/backends/symlinkcopy.rst delete mode 100644 storages/backends/couchdb.py delete mode 100644 storages/backends/database.py delete mode 100644 storages/backends/hashpath.py delete mode 100644 storages/backends/image.py delete mode 100644 storages/backends/mogile.py delete mode 100644 storages/backends/overwrite.py delete mode 100644 storages/backends/symlinkorcopy.py delete mode 100644 tests/test_hashpath.py diff --git a/docs/backends/couchdb.rst b/docs/backends/couchdb.rst deleted file mode 100644 index f93761b9d..000000000 --- a/docs/backends/couchdb.rst +++ /dev/null 
@@ -1,5 +0,0 @@ -CouchDB -======= - -A custom storage system for Django with CouchDB backend. - diff --git a/docs/backends/database.rst b/docs/backends/database.rst deleted file mode 100644 index e2f40413a..000000000 --- a/docs/backends/database.rst +++ /dev/null @@ -1,59 +0,0 @@ -Database -======== - -Class DatabaseStorage can be used with either FileField or ImageField. It can be used to map filenames to database blobs: so you have to use it with a special additional table created manually. The table should contain a pk-column for filenames (better to use the same type that FileField uses: nvarchar(100)), blob field (image type for example) and size field (bigint). You can't just create blob column in the same table, where you defined FileField, since there is no way to find required row in the save() method. Also size field is required to obtain better perfomance (see size() method). - -So you can use it with different FileFields and even with different "upload_to" variables used. Thus it implements a kind of root filesystem, where you can define dirs using "upload_to" with FileField and store any files in these dirs. - -It uses either settings.DB_FILES_URL or constructor param 'base_url' (see __init__()) to create urls to files. Base url should be mapped to view that provides access to files. To store files in the same table, where FileField is defined you have to define your own field and provide extra argument (e.g. pk) to save(). - -Raw sql is used for all operations. In constructor or in DB_FILES of settings.py () you should specify a dictionary with db_table, fname_column, blob_column, size_column and 'base_url'. For example I just put to the settings.py the following line:: - - DB_FILES = { - 'db_table': 'FILES', - 'fname_column': 'FILE_NAME', - 'blob_column': 'BLOB', - 'size_column': 'SIZE', - 'base_url': 'http://localhost/dbfiles/' - } - -And use it with ImageField as following:: - - player_photo = models.ImageField(upload_to="player_photos", storage=DatabaseStorage() ) - -DatabaseStorage class uses your settings.py file to perform custom connection to your database. - -The reason to use custom connection: http://code.djangoproject.com/ticket/5135 Connection string looks like:: - - cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=testdb;UID=me;PWD=pass') - -It's based on pyodbc module, so can be used with any database supported by pyodbc. I've tested it with MS Sql Express 2005. - -Note: It returns special path, which should be mapped to special view, which returns requested file:: - - def image_view(request, filename): - import os - from django.http import HttpResponse - from django.conf import settings - from django.utils._os import safe_join - from filestorage import DatabaseStorage - from django.core.exceptions import ObjectDoesNotExist - - storage = DatabaseStorage() - - try: - image_file = storage.open(filename, 'rb') - file_content = image_file.read() - except: - filename = 'no_image.gif' - path = safe_join(os.path.abspath(settings.MEDIA_ROOT), filename) - if not os.path.exists(path): - raise ObjectDoesNotExist - no_image = open(path, 'rb') - file_content = no_image.read() - - response = HttpResponse(file_content, mimetype="image/jpeg") - response['Content-Disposition'] = 'inline; filename=%s'%filename - return response - -.. note:: If filename exist, blob will be overwritten, to change this remove get_available_name(self, name), so Storage.get_available_name(self, name) will be used to generate new filename. 
diff --git a/docs/backends/image.rst b/docs/backends/image.rst deleted file mode 100644 index b03e1d4ec..000000000 --- a/docs/backends/image.rst +++ /dev/null @@ -1,5 +0,0 @@ -Image -===== - -A custom FileSystemStorage made for normalizing extensions. It lets PIL look at the file to determine the format and append an always lower-case extension based on the results. - diff --git a/docs/backends/mogilefs.rst b/docs/backends/mogilefs.rst deleted file mode 100644 index 7d868fec4..000000000 --- a/docs/backends/mogilefs.rst +++ /dev/null @@ -1,67 +0,0 @@ -MogileFS -======== - -This storage allows you to use MogileFS, it comes from this blog post. - -The MogileFS storage backend is fairly simple: it uses URLs (or, rather, parts of URLs) as keys into the mogile database. When the user requests a file stored by mogile (say, an avatar), the URL gets passed to a view which, using a client to the mogile tracker, retrieves the "correct" path (the path that points to the actual file data). The view will then either return the path(s) to perlbal to reproxy, or, if you're not using perlbal to reproxy (which you should), it serves the data of the file directly from django. - -To use `MogileFSStorage` set:: - - DEFAULT_FILE_STORAGE = 'storages.backends.mogile.MogileFSStorage' - -The following settings are available: - -``MOGILEFS_DOMAIN`` - The mogile domain that files should read from/written to, e.g "production" - -``MOGILEFS_TRACKERS`` - A list of trackers to connect to, e.g. ["foo.sample.com:7001", "bar.sample.com:7001"] - -``MOGILEFS_MEDIA_URL`` (optional) - The prefix for URLs that point to mogile files. This is used in a similar way to ``MEDIA_URL``, e.g. "/mogilefs/" - -``SERVE_WITH_PERLBAL`` - Boolean that, when True, will pass the paths back in the response in the ``X-REPROXY-URL`` header. If False, django will serve all mogile media files itself (bad idea for production, but useful if you're testing on a setup that doesn't have perlbal running) - -Getting files into mogile -************************* - -The great thing about file backends is that we just need to specify the backend in the model file and everything is taken care for us - all the default save() methods work correctly. - -For Fluther, we have two main media types we use mogile for: avatars and thumbnails. Mogile defines "classes" that dictate how each type of file is replicated - so you can make sure you have 3 copies of the original avatar but only 1 of the thumbnail. - -In order for classes to behave nicely with the backend framework, we've had to do a little tomfoolery. (This is something that may change in future versions of the filestorage framework). - -Here's what the models.py file looks like for the avatars:: - - from django.core.filestorage import storage - - # TODO: Find a better way to deal with classes. Maybe a generator? - class AvatarStorage(storage.__class__): - mogile_class = 'avatar' - - class ThumbnailStorage(storage.__class__): - mogile_class = 'thumb' - - class Avatar(models.Model): - user = models.ForeignKey(User, null=True, blank=True) - image = models.ImageField(storage=AvatarStorage()) - thumb = models.ImageField(storage=ThumbnailStorage()) - -Each of the custom storage classes defines a class attribute which gets passed to the mogile backend behind the scenes. If you don't want to worry about mogile classes, don't need to define a custom storage engine or specify it in the field - the default should work just fine. 
- -Serving files from mogile -************************* - -Now, all we need to do is plug in the view that serves up mogile data. - -Here's what we use:: - - urlpatterns += patterns(", - (r'^%s(?P.*)' % settings.MOGILEFS_MEDIA_URL[1:], - 'MogileFSStorage.serve_mogilefs_file') - ) - -Any url beginning with the value of ``MOGILEFS_MEDIA_URL`` will get passed to our view. Since ``MOGILEFS_MEDIA_URL`` requires a leading slash (like ``MEDIA_URL``), we strip that off and pass the rest of the url over to the view. - -That's it! Happy mogiling! diff --git a/docs/backends/overwrite.rst b/docs/backends/overwrite.rst deleted file mode 100644 index 66aa87538..000000000 --- a/docs/backends/overwrite.rst +++ /dev/null @@ -1,5 +0,0 @@ -Overwrite -========= - -This is a simple implementation overwrite of the FileSystemStorage. It removes the addition of an '_' to the filename if the file already exists in the storage system. I needed a model in the admin area to act exactly like a file system (overwriting the file if it already exists). - diff --git a/docs/backends/symlinkcopy.rst b/docs/backends/symlinkcopy.rst deleted file mode 100644 index be4abe18e..000000000 --- a/docs/backends/symlinkcopy.rst +++ /dev/null @@ -1,6 +0,0 @@ -Symlink or copy -=============== - -Stores symlinks to files instead of actual files whenever possible - -When a file that's being saved is currently stored in the symlink_within directory, then symlink the file. Otherwise, copy the file. diff --git a/setup.py b/setup.py index a1bf39b0b..bdecbba22 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ def get_requirements_tests(): author='Josh Schneier', author_email='josh.schneier@gmail.com', license='BSD', - description='Support for many storages (S3, Libcloud, etc in Django.', + description='Support for many storage backends in Django', long_description=read('README.rst') + '\n\n' + read('CHANGELOG.rst'), url='https://github.com/jschneier/django-storages', classifiers=[ diff --git a/storages/backends/couchdb.py b/storages/backends/couchdb.py deleted file mode 100644 index 16ef41e45..000000000 --- a/storages/backends/couchdb.py +++ /dev/null @@ -1,144 +0,0 @@ -""" -This is a Custom Storage System for Django with CouchDB backend. -Created by Christian Klein. -(c) Copyright 2009 HUDORA GmbH. All Rights Reserved. -""" -import os -import warnings - -from django.conf import settings -from django.core.files import File -from django.core.files.storage import Storage -from django.core.exceptions import ImproperlyConfigured -from django.utils.deconstruct import deconstructible -from django.utils.six.moves.urllib import parse as urlparse -from django.utils.six import BytesIO - -try: - import couchdb -except ImportError: - raise ImproperlyConfigured("Could not load couchdb dependency.\ - \nSee http://code.google.com/p/couchdb-python/") - -DEFAULT_SERVER = getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984') -STORAGE_OPTIONS = getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {}) - - -warnings.warn( - 'CouchDBStorage is unmaintained and will be removed in the next version of django-storages ' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class CouchDBStorage(Storage): - """ - CouchDBStorage - a Django Storage class for CouchDB. 
- - The CouchDBStorage can be configured in settings.py, e.g.:: - - COUCHDB_STORAGE_OPTIONS = { - 'server': "http://example.org", - 'database': 'database_name' - } - - Alternatively, the configuration can be passed as a dictionary. - """ - def __init__(self, **kwargs): - kwargs.update(STORAGE_OPTIONS) - self.base_url = kwargs.get('server', DEFAULT_SERVER) - server = couchdb.client.Server(self.base_url) - self.db = server[kwargs.get('database')] - - def _put_file(self, name, content): - self.db[name] = {'size': len(content)} - self.db.put_attachment(self.db[name], content, filename='content') - return name - - def get_document(self, name): - return self.db.get(name) - - def _open(self, name, mode='rb'): - couchdb_file = CouchDBFile(name, self, mode=mode) - return couchdb_file - - def _save(self, name, content): - content.open() - if hasattr(content, 'chunks'): - content_str = ''.join(chunk for chunk in content.chunks()) - else: - content_str = content.read() - name = name.replace('/', '-') - return self._put_file(name, content_str) - - def exists(self, name): - return name in self.db - - def size(self, name): - doc = self.get_document(name) - if doc: - return doc['size'] - return 0 - - def url(self, name): - return urlparse.urljoin(self.base_url, - os.path.join(urlparse.quote_plus(self.db.name), - urlparse.quote_plus(name), - 'content')) - - def delete(self, name): - try: - del self.db[name] - except couchdb.client.ResourceNotFound: - raise IOError("File not found: %s" % name) - - #def listdir(self, name): - # _all_docs? - # pass - - -class CouchDBFile(File): - """ - CouchDBFile - a Django File-like class for CouchDB documents. - """ - - def __init__(self, name, storage, mode): - self._name = name - self._storage = storage - self._mode = mode - self._is_dirty = False - - try: - self._doc = self._storage.get_document(name) - - tmp, ext = os.path.split(name) - if ext: - filename = "content." + ext - else: - filename = "content" - attachment = self._storage.db.get_attachment(self._doc, filename=filename) - self.file = BytesIO(attachment) - except couchdb.client.ResourceNotFound: - if 'r' in self._mode: - raise ValueError("The file cannot be reopened.") - else: - self.file = BytesIO() - self._is_dirty = True - - @property - def size(self): - return self._doc['size'] - - def write(self, content): - if 'w' not in self._mode: - raise AttributeError("File was opened for read-only access.") - self.file = BytesIO(content) - self._is_dirty = True - - def close(self): - if self._is_dirty: - self._storage._put_file(self._name, self.file.getvalue()) - self.file.close() - - diff --git a/storages/backends/database.py b/storages/backends/database.py deleted file mode 100644 index 81954fc55..000000000 --- a/storages/backends/database.py +++ /dev/null @@ -1,141 +0,0 @@ -# DatabaseStorage for django. -# 2009 (c) GameKeeper Gambling Ltd, Ivanov E. 
-import warnings - -from django.conf import settings -from django.core.files import File -from django.core.files.storage import Storage -from django.core.exceptions import ImproperlyConfigured -from django.utils.deconstruct import deconstructible -from django.utils.six import BytesIO -from django.utils.six.moves.urllib import parse as urlparse - -try: - import pyodbc -except ImportError: - raise ImproperlyConfigured("Could not load pyodbc dependency.\ - \nSee https://github.com/mkleehammer/pyodbc") - -REQUIRED_FIELDS = ('db_table', 'fname_column', 'blob_column', 'size_column', 'base_url') -warnings.warn( - 'DatabaseStorage is unmaintained and will be removed in the next version of django-storages.' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class DatabaseStorage(Storage): - """ - Class DatabaseStorage provides storing files in the database. - """ - - def __init__(self, option=settings.DB_FILES): - """Constructor. - - Constructs object using dictionary either specified in contucotr or -in settings.DB_FILES. - - @param option dictionary with 'db_table', 'fname_column', -'blob_column', 'size_column', 'base_url' keys. - - option['db_table'] - Table to work with. - option['fname_column'] - Column in the 'db_table' containing filenames (filenames can -contain pathes). Values should be the same as where FileField keeps -filenames. - It is used to map filename to blob_column. In sql it's simply -used in where clause. - option['blob_column'] - Blob column (for example 'image' type), created manually in the -'db_table', used to store image. - option['size_column'] - Column to store file size. Used for optimization of size() -method (another way is to open file and get size) - option['base_url'] - Url prefix used with filenames. Should be mapped to the view, -that returns an image as result. - """ - - if not option or not all([field in option for field in REQUIRED_FIELDS]): - raise ValueError("You didn't specify required options") - - self.db_table = option['db_table'] - self.fname_column = option['fname_column'] - self.blob_column = option['blob_column'] - self.size_column = option['size_column'] - self.base_url = option['base_url'] - - #get database settings - self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER - self.DATABASE_NAME = settings.DATABASE_NAME - self.DATABASE_USER = settings.DATABASE_USER - self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD - self.DATABASE_HOST = settings.DATABASE_HOST - - self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME, - self.DATABASE_USER, self.DATABASE_PASSWORD) ) - self.cursor = self.connection.cursor() - - def _open(self, name, mode='rb'): - """Open a file from database. - - @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends pathes with "/". - If there is no such file in the db, returs None - """ - - assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode - - row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone() - if row is None: - return None - inMemFile = BytesIO(row[0]) - inMemFile.name = name - inMemFile.mode = mode - - retFile = File(inMemFile) - return retFile - - def _save(self, name, content): - """Save 'content' as file named 'name'. 
- - @note '\' in path will be converted to '/'. - """ - - name = name.replace('\\', '/') - binary = pyodbc.Binary(content.read()) - size = len(binary) - - #todo: check result and do something (exception?) if failed. - if self.exists(name): - self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), - (binary, size) ) - else: - self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size) ) - self.connection.commit() - return name - - def exists(self, name): - row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone() - return row is not None - - def get_available_name(self, name, max_length=None): - return name - - def delete(self, name): - if self.exists(name): - self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name)) - self.connection.commit() - - def url(self, name): - if self.base_url is None: - raise ValueError("This file is not accessible via a URL.") - return urlparse.urljoin(self.base_url, name).replace('\\', '/') - - def size(self, name): - row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone() - if row is None: - return 0 - else: - return int(row[0]) diff --git a/storages/backends/hashpath.py b/storages/backends/hashpath.py deleted file mode 100644 index c161cfc84..000000000 --- a/storages/backends/hashpath.py +++ /dev/null @@ -1,53 +0,0 @@ -import errno -import hashlib -import os -import warnings - -from django.core.files.storage import FileSystemStorage -from django.utils.deconstruct import deconstructible -from django.utils.encoding import force_text, force_bytes - -warnings.warn( - 'HashPathStorage is unmaintaiined and will be removed in the next version of django-storages.' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class HashPathStorage(FileSystemStorage): - """ - Creates a hash from the uploaded file to build the path. 
- """ - - def save(self, name, content, max_length=None): - # Get the content name if name is not given - if name is None: - name = content.name - - # Get the SHA1 hash of the uploaded file - sha1 = hashlib.sha1() - for chunk in content.chunks(): - sha1.update(force_bytes(chunk)) - sha1sum = sha1.hexdigest() - - # Build the new path and split it into directory and filename - name = os.path.join(os.path.split(name)[0], sha1sum[:1], sha1sum[1:2], sha1sum) - dir_name, file_name = os.path.split(name) - - # Return the name if the file is already there - if self.exists(name): - return name - - # Try to create the directory relative to location specified in __init__ - try: - os.makedirs(os.path.join(self.location, dir_name)) - except OSError as e: - if e.errno is not errno.EEXIST: - raise e - - # Save the file - name = self._save(name, content) - - # Store filenames with forward slashes, even on Windows - return force_text(name.replace('\\', '/')) diff --git a/storages/backends/image.py b/storages/backends/image.py deleted file mode 100644 index 22c93a850..000000000 --- a/storages/backends/image.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -import warnings - -from django.core.exceptions import ImproperlyConfigured -from django.core.files.storage import FileSystemStorage -from django.utils.deconstruct import deconstructible - -try: - from PIL import ImageFile as PILImageFile -except ImportError: - raise ImproperlyConfigured("Could not load PIL dependency.\ - \nSee http://www.pythonware.com/products/pil/") - - -warnings.warn( - 'ImageStorage backend is unmaintainted and will be removed in the next django-storages version' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class ImageStorage(FileSystemStorage): - """ - A FileSystemStorage which normalizes extensions for images. - - Comes from http://www.djangosnippets.org/snippets/965/ - """ - - def find_extension(self, format): - """Normalizes PIL-returned format into a standard, lowercase extension.""" - format = format.lower() - - if format == 'jpeg': - format = 'jpg' - - return format - - def save(self, name, content, max_length=None): - dirname = os.path.dirname(name) - basename = os.path.basename(name) - - # Use PIL to determine filetype - - p = PILImageFile.Parser() - while 1: - data = content.read(1024) - if not data: - break - p.feed(data) - if p.image: - im = p.image - break - - extension = self.find_extension(im.format) - - # Does the basename already have an extension? If so, replace it. - # bare as in without extension - bare_basename, _ = os.path.splitext(basename) - basename = bare_basename + '.' 
+ extension - - name = os.path.join(dirname, basename) - return super(ImageStorage, self).save(name, content) - diff --git a/storages/backends/mogile.py b/storages/backends/mogile.py deleted file mode 100644 index d61941943..000000000 --- a/storages/backends/mogile.py +++ /dev/null @@ -1,124 +0,0 @@ -from __future__ import print_function - -import mimetypes -import warnings - -from django.conf import settings -from django.core.cache import cache -from django.utils.deconstruct import deconstructible -from django.utils.text import force_text -from django.http import HttpResponse, HttpResponseNotFound -from django.core.exceptions import ImproperlyConfigured -from django.core.files.storage import Storage - -try: - import mogilefs -except ImportError: - raise ImproperlyConfigured("Could not load mogilefs dependency.\ - \nSee http://mogilefs.pbworks.com/Client-Libraries") - -warnings.warn( - 'MogileFSStorage is unmaintained and will be removed in the next django-storages version' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class MogileFSStorage(Storage): - """MogileFS filesystem storage""" - def __init__(self, base_url=settings.MEDIA_URL): - - # the MOGILEFS_MEDIA_URL overrides MEDIA_URL - if hasattr(settings, 'MOGILEFS_MEDIA_URL'): - self.base_url = settings.MOGILEFS_MEDIA_URL - else: - self.base_url = base_url - - for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',): - if not hasattr(settings, var): - raise ImproperlyConfigured("You must define %s to use the MogileFS backend." % var) - - self.trackers = settings.MOGILEFS_TRACKERS - self.domain = settings.MOGILEFS_DOMAIN - self.client = mogilefs.Client(self.domain, self.trackers) - - def get_mogile_paths(self, filename): - return self.client.get_paths(filename) - - # The following methods define the Backend API - - def filesize(self, filename): - raise NotImplemented - #return os.path.getsize(self._get_absolute_path(filename)) - - def path(self, filename): - paths = self.get_mogile_paths(filename) - if paths: - return self.get_mogile_paths(filename)[0] - else: - return None - - def url(self, filename): - return urlparse.urljoin(self.base_url, filename).replace('\\', '/') - - def open(self, filename, mode='rb'): - raise NotImplemented - #return open(self._get_absolute_path(filename), mode) - - def exists(self, filename): - return filename in self.client - - def save(self, filename, raw_contents, max_length=None): - filename = self.get_available_name(filename, max_length) - - if not hasattr(self, 'mogile_class'): - self.mogile_class = None - - # Write the file to mogile - success = self.client.send_file(filename, BytesIO(raw_contents), self.mogile_class) - if success: - print("Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])) - else: - print("FAILURE writing file %s" % (filename)) - - return force_text(filename.replace('\\', '/')) - - def delete(self, filename): - self.client.delete(filename) - - -def serve_mogilefs_file(request, key=None): - """ - Called when a user requests an image. 
- Either reproxy the path to perlbal, or serve the image outright - """ - # not the best way to do this, since we create a client each time - mimetype = mimetypes.guess_type(key)[0] or "application/x-octet-stream" - client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS) - if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL: - # we're reproxying with perlbal - - # check the path cache - - path = cache.get(key) - - if not path: - path = client.get_paths(key) - cache.set(key, path, 60) - - if path: - response = HttpResponse(content_type=mimetype) - response['X-REPROXY-URL'] = path[0] - else: - response = HttpResponseNotFound() - - else: - # we don't have perlbal, let's just serve the image via django - file_data = client[key] - if file_data: - response = HttpResponse(file_data, mimetype=mimetype) - else: - response = HttpResponseNotFound() - - return response diff --git a/storages/backends/overwrite.py b/storages/backends/overwrite.py deleted file mode 100644 index 0a55059c1..000000000 --- a/storages/backends/overwrite.py +++ /dev/null @@ -1,29 +0,0 @@ -import warnings - -from django.core.files.storage import FileSystemStorage -from django.utils.deconstruct import deconstructible - -warnings.warn( - 'OverwriteStorage is unmaintained and will be removed in the next django-storages version.' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class OverwriteStorage(FileSystemStorage): - """ - Comes from http://www.djangosnippets.org/snippets/976/ - (even if it already exists in S3Storage for ages) - - See also Django #4339, which might add this functionality to core. - """ - - def get_available_name(self, name, max_length=None): - """ - Returns a filename that's free on the target storage system, and - available for new content to be written to. - """ - if self.exists(name): - self.delete(name) - return name diff --git a/storages/backends/symlinkorcopy.py b/storages/backends/symlinkorcopy.py deleted file mode 100644 index e5b6e7ef3..000000000 --- a/storages/backends/symlinkorcopy.py +++ /dev/null @@ -1,71 +0,0 @@ -import os -import warnings - -from django.conf import settings -from django.core.files.storage import FileSystemStorage -from django.utils.deconstruct import deconstructible - -__doc__ = """ -I needed to efficiently create a mirror of a directory tree (so that -"origin pull" CDNs can automatically pull files). The trick was that -some files could be modified, and some could be identical to the original. -Of course it doesn't make sense to store the exact same data twice on the -file system. So I created SymlinkOrCopyStorage. - -SymlinkOrCopyStorage allows you to symlink a file when it's identical to -the original file and to copy the file if it's modified. -Of course, it's impossible to know if a file is modified just by looking -at the file, without knowing what the original file was. -That's what the symlinkWithin parameter is for. It accepts one or more paths -(if multiple, they should be concatenated using a colon (:)). -Files that will be saved using SymlinkOrCopyStorage are then checked on their -location: if they are within one of the symlink_within directories, -they will be symlinked, otherwise they will be copied. - -The rationale is that unmodified files will exist in their original location, -e.g. /htdocs/example.com/image.jpg and modified files will be stored in -a temporary directory, e.g. /tmp/image.jpg. 
-""" -warnings.warn( - 'SymlinkOrCopyStorage is unmaintained and will be removed in the next django-storages version.' - 'See https://github.com/jschneier/django-storages/issues/202', - PendingDeprecationWarning -) - - -@deconstructible -class SymlinkOrCopyStorage(FileSystemStorage): - """Stores symlinks to files instead of actual files whenever possible - - When a file that's being saved is currently stored in the symlink_within - directory, then symlink the file. Otherwise, copy the file. - """ - def __init__(self, location=settings.MEDIA_ROOT, base_url=settings.MEDIA_URL, - symlink_within=None): - super(SymlinkOrCopyStorage, self).__init__(location, base_url) - self.symlink_within = symlink_within.split(":") - - def _save(self, name, content): - full_path_dst = self.path(name) - - directory = os.path.dirname(full_path_dst) - if not os.path.exists(directory): - os.makedirs(directory) - elif not os.path.isdir(directory): - raise IOError("%s exists and is not a directory." % directory) - - full_path_src = os.path.abspath(content.name) - - symlinked = False - # Only symlink if the current platform supports it. - if getattr(os, "symlink", False): - for path in self.symlink_within: - if full_path_src.startswith(path): - os.symlink(full_path_src, full_path_dst) - symlinked = True - break - - if not symlinked: - super(SymlinkOrCopyStorage, self)._save(name, content) - - return name diff --git a/tests/test_hashpath.py b/tests/test_hashpath.py deleted file mode 100644 index 5cc4d6571..000000000 --- a/tests/test_hashpath.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import shutil - -from django.test import TestCase -from django.core.files.base import ContentFile -from django.conf import settings - -from storages.backends.hashpath import HashPathStorage - -TEST_PATH_PREFIX = 'django-storages-test' - - -class HashPathStorageTest(TestCase): - - def setUp(self): - self.test_path = os.path.join(settings.MEDIA_ROOT, TEST_PATH_PREFIX) - self.storage = HashPathStorage(location=self.test_path) - - # make sure the profile upload folder exists - if not os.path.exists(self.test_path): - os.makedirs(self.test_path) - - def tearDown(self): - # remove uploaded profile picture - if os.path.exists(self.test_path): - shutil.rmtree(self.test_path) - - def test_save_same_file(self): - """ - saves a file twice, the file should only be stored once, because the - content/hash is the same - """ - path_1 = self.storage.save('test', ContentFile('new content')) - path_2 = self.storage.save('test', ContentFile('new content')) - self.assertEqual(path_1, path_2) From 25b1b2810ec4272e2eae1edb6948298ae0c4e132 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Mon, 13 Mar 2017 16:51:58 -0300 Subject: [PATCH 069/174] CHANGELOG for #280 --- CHANGELOG.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 0fcb22220..abd085d2b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,11 +1,13 @@ django-storages change log ========================== -1.5.3 (XXXX-XX-XX) +1.6.0 (XXXX-XX-XX) ****************** +* **Remove backends deprecated in v1.5.1** (`#280`_) * Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) +.. _#280: https://github.com/jschneier/django-storages/pull/280 .. _#257: https://github.com/jschneier/django-storages/issues/257 .. 
_#258: https://github.com/jschneier/django-storages/pull/258 From 043b91b10ebfebdf7e752d743ae630f0daa2c4f4 Mon Sep 17 00:00:00 2001 From: David D Lowe Date: Mon, 13 Mar 2017 20:55:29 +0100 Subject: [PATCH 070/174] Correct example in documentation of S3 parameters (#275) When using the key "Cache-Control" in AWS_S3_OBJECT_PARAMETERS, I got this error: > ValueError: Invalid extra_args key 'Cache-Control', must be one of: ACL, CacheControl, ContentDisposition, ContentEncoding, ContentLanguage, ContentType, Expires, GrantFullControl, GrantRead, GrantReadACP, GrantWriteACP, Metadata, RequestPayer, ServerSideEncryption, StorageClass, SSECustomerAlgorithm, SSECustomerKey, SSECustomerKeyMD5, SSEKMSKeyId, WebsiteRedirectLocation I fixed it by replacing "Cache-Control" with "CacheControl". I fixed the documentation to use the correct key, and to remove the word arbitrary, since it doesn't seem arbitrary to me. --- docs/backends/amazon-S3.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index ba5cfe583..7fc676ba4 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -56,10 +56,10 @@ Available are numerous settings. It should be especially noted the following: } ``AWS_S3_OBJECT_PARAMETERS`` (optional - boto3 only) - Use this to set arbitrary parameters on your object (such as Cache-Control):: + Use this to set object parameters on your object (such as CacheControl):: AWS_S3_OBJECT_PARAMETERS = { - 'Cache-Control': 'max-age=86400', + 'CacheControl': 'max-age=86400', } ``AWS_QUERYSTRING_AUTH`` (optional; default is ``True``) From 94281ffac9eb6b97f211fb398e8aea562f1c07df Mon Sep 17 00:00:00 2001 From: Greg Kempe Date: Fri, 31 Mar 2017 19:59:58 +0200 Subject: [PATCH 071/174] Document extra S3 params. (#284) --- docs/backends/amazon-S3.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 7fc676ba4..9a7dde6cb 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -86,6 +86,15 @@ Available are numerous settings. It should be especially noted the following: ``GZIP_CONTENT_TYPES`` (optional: default is ``text/css``, ``text/javascript``, ``application/javascript``, ``application/x-javascript``, ``image/svg+xml``) When ``AWS_IS_GZIPPED`` is set to ``True`` the content types which will be gzipped +``AWS_S3_REGION_NAME`` (optional: default is ``None``) + Name of the AWS S3 region to use (eg. eu-west-1) + +``AWS_S3_USE_SSL`` (optional: default is ``True``) + Whether or not to use SSL when connecting to S3. + +``AWS_S3_ENDPOINT_URL`` (optional: default is ``None``) + Custom S3 URL to use when connecting to S3, including scheme. Overrides ``AWS_S3_REGION_NAME`` and ``AWS_S3_USE_SSL``. + .. 
_query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html CloudFront From a9c4a3a73fbb02f0fe73d733c25f9b4a9fcf53fa Mon Sep 17 00:00:00 2001 From: Bryan Marty Date: Fri, 31 Mar 2017 11:03:01 -0700 Subject: [PATCH 072/174] Add support for AWS_SESSION_TOKEN and AWS_SECURITY_TOKEN (#283) * Add security token support * Add session token support for s3boto3 --- storages/backends/s3boto.py | 22 +++++++++++++++------- storages/backends/s3boto3.py | 22 +++++++++++++++------- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index b36ebde76..7a3556ecf 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -205,6 +205,7 @@ class S3BotoStorage(Storage): # used for looking up the access and secret key from env vars access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'] secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY'] + security_token_names = ['AWS_SESSION_TOKEN', 'AWS_SECURITY_TOKEN'] access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID')) secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY')) @@ -270,6 +271,7 @@ def __init__(self, acl=None, bucket=None, **settings): if not self.access_key and not self.secret_key: self.access_key, self.secret_key = self._get_access_keys() + self.security_token = self._get_security_token() @property def connection(self): @@ -277,6 +279,7 @@ def connection(self): self._connection = self.connection_class( self.access_key, self.secret_key, + security_token=self.security_token, is_secure=self.use_ssl, calling_format=self.calling_format, host=self.host, @@ -306,21 +309,26 @@ def entries(self): for entry in self.bucket.list(prefix=self.location)) return self._entries + def _lookup_env(self, names): + for name in names: + value = os.environ.get(name) + if value: + return value + def _get_access_keys(self): """ Gets the access keys to use when accessing S3. If none are provided to the class in the constructor or in the settings then get them from the environment variables. """ - def lookup_env(names): - for name in names: - value = os.environ.get(name) - if value: - return value - access_key = self.access_key or lookup_env(self.access_key_names) - secret_key = self.secret_key or lookup_env(self.secret_key_names) + access_key = self.access_key or self._lookup_env(self.access_key_names) + secret_key = self.secret_key or self._lookup_env(self.secret_key_names) return access_key, secret_key + def _get_security_token(self): + security_token = self._lookup_env(self.security_token_names) + return security_token + def _get_or_create_bucket(self, name): """ Retrieves a bucket if it exists, otherwise creates it. 
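
A runnable sketch of the lookup order this patch introduces: the first
matching environment variable wins, so ``AWS_SESSION_TOKEN`` shadows the
legacy ``AWS_SECURITY_TOKEN`` (the token values are fakes)::

    import os

    os.environ['AWS_SECURITY_TOKEN'] = 'legacy-token'
    os.environ['AWS_SESSION_TOKEN'] = 'current-token'

    def lookup_env(names):
        # Same logic as _lookup_env() above: first non-empty variable wins.
        for name in names:
            value = os.environ.get(name)
            if value:
                return value

    assert lookup_env(['AWS_SESSION_TOKEN', 'AWS_SECURITY_TOKEN']) == 'current-token'
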
diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index a17885fd9..6f50ef2b7 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -209,6 +209,7 @@ class S3Boto3Storage(Storage): # used for looking up the access and secret key from env vars access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'] secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY'] + security_token_names = ['AWS_SESSION_TOKEN', 'AWS_SECURITY_TOKEN'] access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID')) secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY')) @@ -272,6 +273,7 @@ def __init__(self, acl=None, bucket=None, **settings): if not self.access_key and not self.secret_key: self.access_key, self.secret_key = self._get_access_keys() + self.security_token = self._get_security_token() if not self.config: self.config = Config(s3={'addressing_style': self.addressing_style}, @@ -289,6 +291,7 @@ def connection(self): self.connection_service_name, aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key, + aws_session_token=self.security_token, region_name=self.region_name, use_ssl=self.use_ssl, endpoint_url=self.endpoint_url, @@ -316,21 +319,26 @@ def entries(self): for entry in self.bucket.objects.filter(Prefix=self.location)) return self._entries + def _lookup_env(self, names): + for name in names: + value = os.environ.get(name) + if value: + return value + def _get_access_keys(self): """ Gets the access keys to use when accessing S3. If none are provided to the class in the constructor or in the settings then get them from the environment variables. """ - def lookup_env(names): - for name in names: - value = os.environ.get(name) - if value: - return value - access_key = self.access_key or lookup_env(self.access_key_names) - secret_key = self.secret_key or lookup_env(self.secret_key_names) + access_key = self.access_key or self._lookup_env(self.access_key_names) + secret_key = self.secret_key or self._lookup_env(self.secret_key_names) return access_key, secret_key + def _get_security_token(self): + security_token = self._lookup_env(self.security_token_names) + return security_token + def _get_or_create_bucket(self, name): """ Retrieves a bucket if it exists, otherwise creates it. From 1ad4890ce97c028a80e2afda9dbfbc00e27c367d Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 31 Mar 2017 15:05:27 -0300 Subject: [PATCH 073/174] Update CHANGELOG for #283 --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index abd085d2b..9f64c2483 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,7 +6,10 @@ django-storages change log * **Remove backends deprecated in v1.5.1** (`#280`_) * Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) +* Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment + to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_ thanks @bxm156) +.. _#283: https://github.com/jschneier/django-storages/pull/280 .. _#280: https://github.com/jschneier/django-storages/pull/280 .. _#257: https://github.com/jschneier/django-storages/issues/257 .. 
_#258: https://github.com/jschneier/django-storages/pull/258 From 7977533662c7d3c67b7dbccb7500bf1c445f1e26 Mon Sep 17 00:00:00 2001 From: niharathomas Date: Mon, 3 Apr 2017 11:56:11 -0700 Subject: [PATCH 074/174] Added AWS_S3_CALLING_FORMAT to AWS S3 documentation (#288) --- docs/backends/amazon-S3.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 9a7dde6cb..07b38ffe7 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -95,6 +95,9 @@ Available are numerous settings. It should be especially noted the following: ``AWS_S3_ENDPOINT_URL`` (optional: default is ``None``) Custom S3 URL to use when connecting to S3, including scheme. Overrides ``AWS_S3_REGION_NAME`` and ``AWS_S3_USE_SSL``. +``AWS_S3_CALLING_FORMAT`` (optional: default is ``SubdomainCallingFormat()``) + Defines the S3 calling format to use to connect to the static bucket. + .. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html CloudFront From 25297291a1a6491d989ad7b4179c61701e9d56bd Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 4 Apr 2017 14:05:34 -0400 Subject: [PATCH 075/174] Don't break if we aren't using session tokens --- storages/backends/s3boto.py | 1 + storages/backends/s3boto3.py | 1 + 2 files changed, 2 insertions(+) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 7a3556ecf..0ea726d04 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -269,6 +269,7 @@ def __init__(self, acl=None, bucket=None, **settings): self._bucket = None self._connection = None + self.security_token = None if not self.access_key and not self.secret_key: self.access_key, self.secret_key = self._get_access_keys() self.security_token = self._get_security_token() diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 6f50ef2b7..d8cfcbcf2 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -271,6 +271,7 @@ def __init__(self, acl=None, bucket=None, **settings): self._bucket = None self._connection = None + self.security_token = None if not self.access_key and not self.secret_key: self.access_key, self.secret_key = self._get_access_keys() self.security_token = self._get_security_token() From 7f5838c8e4a213203e2ebb98c61e778279a48532 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 4 Apr 2017 15:40:18 -0400 Subject: [PATCH 076/174] Style cleanup --- storages/backends/s3boto.py | 29 +++++++++++++---------------- storages/backends/s3boto3.py | 5 ++--- 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 0ea726d04..b8475b584 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -51,7 +51,7 @@ def safe_join(base, *paths): final_path = base_path for path in paths: - final_path = urlparse.urljoin(final_path.rstrip('/') + "/", path) + final_path = urlparse.urljoin(final_path.rstrip('/') + '/', path) # Ensure final_path starts with base_path and that the next character after # the final path is '/' (or nothing, in which case final_path must be @@ -114,8 +114,8 @@ def _get_file(self): if self._file is None: self._file = SpooledTemporaryFile( max_size=self._storage.max_memory_size, - suffix=".S3BotoStorageFile", - dir=setting("FILE_UPLOAD_TEMP_DIR", None) + suffix='.S3BotoStorageFile', + dir=setting('FILE_UPLOAD_TEMP_DIR', None) ) if 'r' in self._mode: self._is_dirty = False @@ -132,12 +132,12 @@ 
def _set_file(self, value): def read(self, *args, **kwargs): if 'r' not in self._mode: - raise AttributeError("File was not opened in read mode.") + raise AttributeError('File was not opened in read mode.') return super(S3BotoStorageFile, self).read(*args, **kwargs) def write(self, content, *args, **kwargs): if 'w' not in self._mode: - raise AttributeError("File was not opened in write mode.") + raise AttributeError('File was not opened in write mode.') self._is_dirty = True if self._multipart is None: provider = self.key.bucket.connection.provider @@ -165,9 +165,6 @@ def _buffer_file_size(self): return length def _flush_write_buffer(self): - """ - Flushes the write buffer. - """ if self._buffer_file_size: self._write_counter += 1 self.file.seek(0) @@ -180,7 +177,7 @@ def close(self): self._flush_write_buffer() self._multipart.complete_upload() else: - if not self._multipart is None: + if self._multipart is not None: self._multipart.cancel_upload() self.key.close() if self._file is not None: @@ -341,10 +338,10 @@ def _get_or_create_bucket(self, name): bucket = self.connection.create_bucket(name, location=self.origin) bucket.set_acl(self.bucket_acl) return bucket - raise ImproperlyConfigured("Bucket %s does not exist. Buckets " - "can be automatically created by " - "setting AWS_AUTO_CREATE_BUCKET to " - "``True``." % name) + raise ImproperlyConfigured('Bucket %s does not exist. Buckets ' + 'can be automatically created by ' + 'setting AWS_AUTO_CREATE_BUCKET to ' + '``True``.' % name) def _clean_name(self, name): """ @@ -471,9 +468,9 @@ def listdir(self, name): dirlist = self.bucket.list(self._encode_name(name)) files = [] dirs = set() - base_parts = name.split("/")[:-1] + base_parts = name.split('/')[:-1] for item in dirlist: - parts = item.name.split("/") + parts = item.name.split('/') parts = parts[len(base_parts):] if len(parts) == 1: # File @@ -506,7 +503,7 @@ def url(self, name, headers=None, response_headers=None, expire=None): # Preserve the trailing slash after normalizing the path. name = self._normalize_name(self._clean_name(name)) if self.custom_domain: - return "%s//%s/%s" % (self.url_protocol, + return '%s//%s/%s' % (self.url_protocol, self.custom_domain, filepath_to_uri(name)) if expire is None: diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index d8cfcbcf2..9ea1a90a6 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -393,9 +393,8 @@ def _clean_name(self, name): # a workaround here. if name.endswith('/') and not clean_name.endswith('/'): # Add a trailing slash as it was stripped. 
- return clean_name + '/' - else: - return clean_name + clean_name += '/' + return clean_name def _normalize_name(self, name): """ From 01f105e49a14d8d6fb9a8c393774b046db3dbb22 Mon Sep 17 00:00:00 2001 From: Guillaume Andreu Sabater Date: Tue, 4 Apr 2017 22:26:33 +0200 Subject: [PATCH 077/174] Apply smart_text on file names in python2.7 [s3boto3] (#217) --- storages/backends/s3boto3.py | 4 ++-- tests/test_s3boto3.py | 17 ++++++++++++++++- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 9ea1a90a6..2e5241734 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -8,7 +8,7 @@ from django.core.files.base import File from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible -from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes +from django.utils.encoding import force_text, smart_text, filepath_to_uri, force_bytes from django.utils.six.moves.urllib import parse as urlparse from django.utils.six import BytesIO from django.utils.timezone import localtime, is_naive @@ -409,7 +409,7 @@ def _normalize_name(self, name): name) def _encode_name(self, name): - return smart_str(name, encoding=self.file_name_charset) + return smart_text(name, encoding=self.file_name_charset) def _decode_name(self, name): return force_text(name, encoding=self.file_name_charset) diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 0f5a6a9b7..69b1f9fbb 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -1,3 +1,6 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + from datetime import datetime import gzip try: @@ -161,7 +164,7 @@ def test_storage_open_write(self): """ Test opening a file in write mode """ - name = 'test_open_for_writing.txt' + name = 'test_open_for_writïng.txt' content = 'new content' # Set the encryption flag used for multipart uploads @@ -336,6 +339,18 @@ def test_generated_url_is_encoded(self): "/whacky%20%26%20filename.mp4") self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) + def test_special_characters(self): + self.storage.custom_domain = "mock.cloudfront.net" + + name = "ãlöhâ.jpg" + content = ContentFile('new content') + self.storage.save(name, content) + self.storage.bucket.Object.assert_called_once_with(name) + + url = self.storage.url(name) + parsed_url = urlparse.urlparse(url) + self.assertEqual(parsed_url.path, "/%C3%A3l%C3%B6h%C3%A2.jpg") + def test_strip_signing_parameters(self): expected = 'http://bucket.s3-aws-region.amazonaws.com/foo/bar' self.assertEqual(self.storage._strip_signing_parameters( From d032e476f72ba9fb09c451c994ee1d7219fad1a7 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 4 Apr 2017 16:30:11 -0400 Subject: [PATCH 078/174] Update CHANGELOG for #217, #217 and fix typo --- CHANGELOG.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9f64c2483..68f8185dc 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -8,8 +8,11 @@ django-storages change log * Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) * Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_ thanks @bxm156) +* Fix Boto3 non-ascii filenames on Python2.7 (`#216`_, `#217`_ thanks @AGASS007) -.. _#283: https://github.com/jschneier/django-storages/pull/280 +.. 
_#217: https://github.com/jschneier/django-storages/pull/217 +.. _#216: https://github.com/jschneier/django-storages/issues/216 +.. _#283: https://github.com/jschneier/django-storages/pull/283 .. _#280: https://github.com/jschneier/django-storages/pull/280 .. _#257: https://github.com/jschneier/django-storages/issues/257 .. _#258: https://github.com/jschneier/django-storages/pull/258 From ab574fd62adb8e2a9bf636d31896e0f88b6643b1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 6 Apr 2017 12:42:30 -0400 Subject: [PATCH 079/174] Fix collectstatic tz handling (#290) * Add a centralized check for getting a key with preload metadata logic * Fix timezone overwriting for collectstatic on Django < 1.10 * Add support for `get_modified_time` --- storages/backends/s3boto.py | 35 +++++++++++++++-------------------- tests/test_s3boto.py | 32 +++++++++++++++++++++++++++----- 2 files changed, 42 insertions(+), 25 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index b8475b584..843f61b41 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -12,6 +12,7 @@ from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes from django.utils.six import BytesIO from django.utils.six.moves.urllib import parse as urlparse +from django.utils import timezone as tz try: from boto import __version__ as boto_version @@ -440,6 +441,12 @@ def _save_content(self, key, content, headers): reduced_redundancy=self.reduced_redundancy, rewind=True, **kwargs) + def _get_key(self, name): + name = self._normalize_name(self._clean_name(name)) + if self.entries: + return self.entries[name] + return self.bucket.get_key(self._encode_name(name)) + def delete(self, name): name = self._normalize_name(self._clean_name(name)) self.bucket.delete_key(self._encode_name(name)) @@ -452,11 +459,7 @@ def exists(self, name): except ImproperlyConfigured: return False - name = self._normalize_name(self._clean_name(name)) - if self.entries: - return name in self.entries - k = self.bucket.new_key(self._encode_name(name)) - return k.exists() + return self._get_key(name) is not None def listdir(self, name): name = self._normalize_name(self._clean_name(name)) @@ -481,23 +484,15 @@ def listdir(self, name): return list(dirs), files def size(self, name): - name = self._normalize_name(self._clean_name(name)) - if self.entries: - entry = self.entries.get(name) - if entry: - return entry.size - return 0 - return self.bucket.get_key(self._encode_name(name)).size + return self._get_key(name).size + + def get_modified_time(self, name): + dt = tz.make_aware(parse_ts(self._get_key(name).last_modified), tz.utc) + return dt if setting('USE_TZ') else tz.make_naive(dt) def modified_time(self, name): - name = self._normalize_name(self._clean_name(name)) - entry = self.entries.get(name) - # only call self.bucket.get_key() if the key is not found - # in the preloaded metadata. - if entry is None: - entry = self.bucket.get_key(self._encode_name(name)) - # Parse the last_modified string to a local datetime object. - return parse_ts(entry.last_modified) + dt = tz.make_aware(parse_ts(self._get_key(name).last_modified), tz.utc) + return tz.make_naive(dt) def url(self, name, headers=None, response_headers=None, expire=None): # Preserve the trailing slash after normalizing the path. 
diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index 93e678d1d..f2a55476b 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -8,6 +8,7 @@ from django.test import TestCase from django.core.files.base import ContentFile from django.utils.six.moves.urllib import parse as urlparse +from django.utils import timezone as tz from boto.exception import S3ResponseError from boto.s3.key import Key @@ -219,13 +220,11 @@ def test_storage_exists_bucket(self): self.assertTrue(self.storage.exists('')) def test_storage_exists(self): - key = self.storage.bucket.new_key.return_value - key.exists.return_value = True + self.storage.bucket.get_key.return_value = mock.MagicMock(spec=Key) self.assertTrue(self.storage.exists("file.txt")) def test_storage_exists_false(self): - key = self.storage.bucket.new_key.return_value - key.exists.return_value = False + self.storage.bucket.get_key.return_value = None self.assertFalse(self.storage.exists("file.txt")) def test_storage_delete(self): @@ -322,8 +321,31 @@ def test_new_file_modified_time(self): name = 'test_storage_save.txt' content = ContentFile('new content') utcnow = datetime.datetime.utcnow() - with mock.patch('storages.backends.s3boto.datetime') as mock_datetime: + with mock.patch('storages.backends.s3boto.datetime') as mock_datetime, self.settings(TIME_ZONE='UTC'): mock_datetime.utcnow.return_value = utcnow self.storage.save(name, content) self.assertEqual(self.storage.modified_time(name), parse_ts(utcnow.strftime(ISO8601))) + + @mock.patch('storages.backends.s3boto.S3BotoStorage._get_key') + def test_get_modified_time(self, getkey): + utcnow = datetime.datetime.utcnow().strftime(ISO8601) + + with self.settings(USE_TZ=True, TIME_ZONE='America/New_York'): + key = mock.MagicMock(spec=Key) + key.last_modified = utcnow + getkey.return_value = key + modtime = self.storage.get_modified_time('foo') + self.assertFalse(tz.is_naive(modtime)) + self.assertEqual(modtime, + tz.make_aware(datetime.datetime.strptime(utcnow, ISO8601), tz.utc)) + + with self.settings(USE_TZ=False, TIME_ZONE='America/New_York'): + key = mock.MagicMock(spec=Key) + key.last_modified = utcnow + getkey.return_value = key + modtime = self.storage.get_modified_time('foo') + self.assertTrue(tz.is_naive(modtime)) + self.assertEqual(modtime, + tz.make_naive(tz.make_aware( + datetime.datetime.strptime(utcnow, ISO8601), tz.utc))) From 601089934ffee4c877df5c88fe068dbaa12376f3 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sat, 8 Apr 2017 09:52:47 -0700 Subject: [PATCH 080/174] Add testing for Django 1.11 and Python 3.6 (#295) Fixes #289 --- .travis.yml | 44 ++++++++++++++++++++++++++++-------------- requirements-tests.txt | 2 +- setup.py | 2 ++ tox.ini | 4 +++- 4 files changed, 36 insertions(+), 16 deletions(-) diff --git a/.travis.yml b/.travis.yml index da94abbce..91aeef546 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,20 +2,36 @@ sudo: false language: python cache: pip -python: - - 3.5 - -env: - - TOX_ENV=py27-django18 - - TOX_ENV=py33-django18 - - TOX_ENV=py34-django18 - - TOX_ENV=py35-django18 - - TOX_ENV=py27-django19 - - TOX_ENV=py34-django19 - - TOX_ENV=py35-django19 - - TOX_ENV=py27-django110 - - TOX_ENV=py34-django110 - - TOX_ENV=py35-django110 +matrix: + include: + - python: 2.7 + env: TOX_ENV=py27-django18 + - python: 3.3 + env: TOX_ENV=py33-django18 + - python: 3.4 + env: TOX_ENV=py34-django18 + - python: 3.5 + env: TOX_ENV=py35-django18 + - python: 2.7 + env: TOX_ENV=py27-django19 + - python: 3.4 + env: TOX_ENV=py34-django19 + - python: 3.5 + env: 
TOX_ENV=py35-django19
+    - python: 2.7
+      env: TOX_ENV=py27-django110
+    - python: 3.4
+      env: TOX_ENV=py34-django110
+    - python: 3.5
+      env: TOX_ENV=py35-django110
+    - python: 2.7
+      env: TOX_ENV=py27-django111
+    - python: 3.4
+      env: TOX_ENV=py34-django111
+    - python: 3.5
+      env: TOX_ENV=py35-django111
+    - python: 3.6
+      env: TOX_ENV=py36-django111

 before_install:
   - pip install codecov
diff --git a/requirements-tests.txt b/requirements-tests.txt
index d87d26210..4fb5282ea 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -1,4 +1,4 @@
-Django>=1.7
+Django>=1.8
 pytest-cov>=2.2.1
 boto>=2.32.0
 boto3>=1.2.3
diff --git a/setup.py b/setup.py
index bdecbba22..c7a6d3373 100644
--- a/setup.py
+++ b/setup.py
@@ -28,6 +28,7 @@ def get_requirements_tests():
         'Framework :: Django :: 1.8',
         'Framework :: Django :: 1.9',
         'Framework :: Django :: 1.10',
+        'Framework :: Django :: 1.11',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: BSD License',
         'Operating System :: OS Independent',
@@ -38,6 +39,7 @@ def get_requirements_tests():
         'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
     ],
     tests_require=get_requirements_tests(),
     test_suite='tests',
diff --git a/tox.ini b/tox.ini
index d181cd7e5..eaa765377 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,6 +3,7 @@ envlist =
     {py27,py33,py34,py35}-django18,
     {py27,py34,py35}-django19
     {py27,py34,py35}-django110
+    {py27,py34,py35,py36}-django111


[testenv]
@@ -14,7 +15,8 @@ deps =
    django18: Django>=1.8, <1.9
    django19: Django>=1.9, <1.10
    django110: Django>=1.10, <1.11
-    py27: mock==1.0.1
+    django111: Django>=1.11, <2.0
+    py27: mock
    boto>=2.32.0
    pytest-cov>=2.2.1
    boto3>=1.2.3

From f471345766cb6d8a6047ecbd42366f00ff5d3e9b Mon Sep 17 00:00:00 2001
From: Ryan Prater
Date: Sat, 8 Apr 2017 11:53:03 -0500
Subject: [PATCH 081/174] Update to Amazon S3 Documentation (#294)

Included note indicating that AWS_S3_CUSTOM_DOMAIN must *not* end in a
slash. It is required that `STATIC_URL` must end in a trailing slash, and
if users are using CloudFront, their `STATIC_URL` and `AWS_S3_CUSTOM_DOMAIN`
values will be the same. It is very easy for users to miss the trailing `/`
and instead set `AWS_S3_CUSTOM_DOMAIN = STATIC_URL`.
---
 docs/backends/amazon-S3.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst
index 07b38ffe7..1dbd04649 100644
--- a/docs/backends/amazon-S3.rst
+++ b/docs/backends/amazon-S3.rst
@@ -107,6 +107,9 @@ If you're using S3 as a CDN (via CloudFront), you'll probably want this storage
 to serve those files using that::

     AWS_S3_CUSTOM_DOMAIN = 'cdn.mydomain.com'
+**NOTE:** Django's ``STATIC_URL`` `must end in a slash`_ and the ``AWS_S3_CUSTOM_DOMAIN`` *must not*. It is best to set this variable independently of ``STATIC_URL``.
+
+.. _must end in a slash: https://docs.djangoproject.com/en/dev/ref/settings/#static-url

 Keep in mind you'll have to configure CloudFront to use the proper bucket as an
 origin manually for this to work.
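As an illustration of the note above, a settings.py sketch; the bucket and
domain values are hypothetical, reusing the example domain from the docs:

    # settings.py -- hypothetical values illustrating the trailing-slash rule
    AWS_STORAGE_BUCKET_NAME = 'mybucket'
    AWS_S3_CUSTOM_DOMAIN = 'cdn.mydomain.com'   # no trailing slash
    STATIC_URL = 'https://cdn.mydomain.com/'    # trailing slash required
    # The mistake the note warns about:
    #   AWS_S3_CUSTOM_DOMAIN = STATIC_URL       # wrong: ends in a slash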
From 304ac4387e199fa796d238cc37886bb851e93448 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Sat, 8 Apr 2017 13:42:08 -0400 Subject: [PATCH 082/174] Update CHANGELOG for recent merges --- CHANGELOG.rst | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 68f8185dc..a4615da7b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,11 +4,13 @@ django-storages change log 1.6.0 (XXXX-XX-XX) ****************** -* **Remove backends deprecated in v1.5.1** (`#280`_) +* *Breaking: Remove backends deprecated in v1.5.1* (`#280`_) * Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) * Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_ thanks @bxm156) -* Fix Boto3 non-ascii filenames on Python2.7 (`#216`_, `#217`_ thanks @AGASS007) +* Fix Boto3 non-ascii filenames on Python 2.7 (`#216`_, `#217`_ thanks @AGASS007) +* Fix ``collectstatic`` timezone handling in and add ``get_modified_time`` to ``S3BotoStorage`` (`#290`_) +* Add support for Django 1.11 (`#295`_ thanks @jdufresne) .. _#217: https://github.com/jschneier/django-storages/pull/217 .. _#216: https://github.com/jschneier/django-storages/issues/216 @@ -16,6 +18,8 @@ django-storages change log .. _#280: https://github.com/jschneier/django-storages/pull/280 .. _#257: https://github.com/jschneier/django-storages/issues/257 .. _#258: https://github.com/jschneier/django-storages/pull/258 +.. _#290: https://github.com/jschneier/django-storages/pull/290 +.. _#295: https://github.com/jschneier/django-storages/pull/295 1.5.2 (2017-01-13) ****************** @@ -36,8 +40,8 @@ django-storages change log 1.5.1 (2016-09-13) ****************** -* **Drop support for Django 1.7** (`#185`_) -* **Deprecate hashpath, image, overwrite, mogile, symlinkorcopy, database, mogile, couchdb.** +* *Breaking: Drop support for Django 1.7* (`#185`_) +* *Breaking: Deprecate hashpath, image, overwrite, mogile, symlinkorcopy, database, mogile, couchdb.* See (`issue #202`_) to discuss maintenance going forward * Use a fixed ``mtime`` argument for ``GzipFile`` in ``S3BotoStorage`` and ``S3Boto3Storage`` to ensure a stable output for gzipped files @@ -63,7 +67,7 @@ django-storages change log * Tests, documentation, add `.readlines` for ``FTPStorage`` (`#175`_) thanks @ZuluPro * Tests and documentation for ``DropBoxStorage`` (`#174`_) thanks @ZuluPro * Fix ``MANIFEST.in`` to not ship ``.pyc`` files. (`#145`_) thanks @fladi -* Enable CI testing of Python3.5 and fix test failure from api change (`#171`_) thanks @tnir +* Enable CI testing of Python 3.5 and fix test failure from api change (`#171`_) thanks @tnir .. _#145: https://github.com/jschneier/django-storages/pull/145 .. _#171: https://github.com/jschneier/django-storages/pull/171 @@ -100,7 +104,7 @@ django-storages change log * Fix memory leak from not closing underlying temp file in ``s3boto`` backend (`#106`_) thanks @kmmbvnr * Allow easily specifying a custom expiry time when generating a url for ``S3BotoStorage`` (`#96`_) thanks @mattbriancon * Check for bucket existence when the empty path ('') is passed to ``storage.exists`` in ``S3BotoStorage`` - - this prevents a crash when running ``collecstatic -c`` on Django 1.9.1 (`#112`_) fixed in `#116`_ thanks @xblitz + this prevents a crash when running ``collectstatic -c`` on Django 1.9.1 (`#112`_) fixed in `#116`_ thanks @xblitz .. 
_#106: https://github.com/jschneier/django-storages/pull/106 .. _#96: https://github.com/jschneier/django-storages/pull/96 @@ -128,7 +132,7 @@ django-storages change log 1.3 (2015-08-14) **************** -* **Drop Support for Django 1.5 and Python2.6** +* *Breaking: Drop Support for Django 1.5 and Python 2.6* * Remove previously deprecated mongodb backend * Remove previously deprecated ``parse_ts_extended`` from s3boto storage * Add support for Django 1.8+ (`#36`__) From 2f6f6e7a2edda789b2a30dd5967aec6866be20e2 Mon Sep 17 00:00:00 2001 From: Eirik Martiniussen Sylliaas Date: Wed, 27 Apr 2016 01:56:08 +0200 Subject: [PATCH 083/174] Add Google Cloud Storage backend using the gcloud-python library --- storages/backends/google.py | 241 ++++++++++++++++++++++++++++++++++++ 1 file changed, 241 insertions(+) create mode 100644 storages/backends/google.py diff --git a/storages/backends/google.py b/storages/backends/google.py new file mode 100644 index 000000000..1ead8c387 --- /dev/null +++ b/storages/backends/google.py @@ -0,0 +1,241 @@ +import mimetypes +from tempfile import SpooledTemporaryFile + +from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import File +from django.utils.encoding import force_bytes, force_text, smart_str +from storages.compat import Storage +from storages.utils import setting + +try: + from gcloud.storage.client import Client + from gcloud.storage.bucket import Bucket + from gcloud.storage.blob import Blob + from gcloud.exceptions import NotFound +except ImportError: + raise ImproperlyConfigured("Could not load Google Storage bindings.\n" + "See https://github.com/GoogleCloudPlatform/gcloud-python") + + +class GoogleCloudFile(File): + def __init__(self, name, mode, storage, buffer_size=None): + self.name = name + self._mode = mode + self._storage = storage + self.blob = Blob(self.name, storage.bucket) + self._file = None + self._is_dirty = False + + @property + def size(self): + return self.blob.size + + def _get_file(self): + if self._file is None: + self._file = SpooledTemporaryFile( + max_size=self._storage.max_memory_size, + suffix=".GSStorageFile", + dir=setting("FILE_UPLOAD_TEMP_DIR", None) + ) + if 'r' in self._mode: + self._is_dirty = False + self.blob.download_to_file(self._file) + self._file.seek(0) + return self._file + + def _set_file(self, value): + self._file = value + + file = property(_get_file, _set_file) + + def read(self, *args, **kwargs): + if 'r' not in self._mode: + raise AttributeError("File was not opened in read mode.") + return super(GoogleCloudFile, self).read(*args, **kwargs) + + def write(self, content, *args, **kwargs): + if 'w' not in self._mode: + raise AttributeError("File was not opened in write mode.") + self._is_dirty = True + return super(GoogleCloudFile, self).write(force_bytes(content), *args, **kwargs) + + def close(self): + if self._file is not None: + if self._is_dirty: + self.file.seek(0) + content_type, _ = mimetypes.guess_type(self.name) + content_type = getattr(self.file, 'content_type', content_type) + size = getattr(self.file, 'size') + self.blob.upload_from_file(self.file, content_type=content_type, size=size) + self._file.close() + self._file = None + + +class GoogleCloudStorage(Storage): + client_class = Client + bucket_class = Bucket + file_class = GoogleCloudFile + + not_found_exception = NotFound + + project_id = setting('GS_PROJECT_ID', None) + credentials = setting('GS_CREDENTIALS', None) + bucket_name = setting('GS_BUCKET_NAME', None) + auto_create_bucket = 
setting('GS_AUTO_CREATE_BUCKET', False) + default_acl = setting('GS_DEFAULT_ACL', 'public-read') + bucket_acl = setting('GS_BUCKET_ACL', default_acl) + file_name_charset = setting('GS_FILE_NAME_CHARSET', 'utf-8') + file_overwrite = setting('GS_FILE_OVERWRITE', True) + # The max amount of memory a returned file can take up before being + # rolled over into a temporary file on disk. Default is 0: Do not roll over. + max_memory_size = setting('GS_MAX_MEMORY_SIZE', 0) + + def __init__(self, **settings): + # check if some of the settings we've provided as class attributes + # need to be overwritten with values passed in here + for name, value in settings.items(): + if hasattr(self, name): + setattr(self, name, value) + + self._bucket = None + self._client = None + + @property + def client(self): + if self._client is None: + self._client = self.client_class( + project=self.project_id, + credentials=self.credentials + ) + return self._client + + @property + def bucket(self): + if self._bucket is None: + self._bucket = self._get_or_create_bucket(self.bucket_name) + return self._bucket + + def _get_or_create_bucket(self, name): + """ + Retrieves a bucket if it exists, otherwise creates it. + """ + try: + return self.client.get_bucket(name) + except self.not_found_exception: + if self.auto_create_bucket: + bucket = self.client.create_bucket(name) + bucket.acl.all().grant(self.bucket_acl) + bucket.acl.save() + return bucket + raise ImproperlyConfigured("Bucket %s does not exist. Buckets " + "can be automatically created by " + "setting GS_AUTO_CREATE_BUCKET to " + "``True``." % name) + + def _normalize_name(self, name): + """ + No normalizing supported. This can be implemented later. + TODO: Implement normalizing, like the s3boto backend. + """ + return name + + def _clean_name(self, name): + """ + Cleans the name so that Windows style paths work + """ + # Normalize Windows style paths + clean_name = name.replace('\\', '/') + + # os.path.normpath() can strip trailing slashes so we implement + # a workaround here. + if name.endswith('/') and not clean_name.endswith('/'): + # Add a trailing slash as it was stripped. 
+ return clean_name + '/' + else: + return clean_name + + def _encode_name(self, name): + return smart_str(name, encoding=self.file_name_charset) + + def _decode_name(self, name): + return force_text(name, encoding=self.file_name_charset) + + def _open(self, name, mode='rb'): + name = self._normalize_name(self._clean_name(name)) + file_object = self.file_class(name, mode, self) + if not file_object.blob: + raise IOError('File does not exist: %s' % name) + return file_object + + def _save(self, name, content): + cleaned_name = self._clean_name(name) + name = self._normalize_name(cleaned_name) + content_type, _ = mimetypes.guess_type(name) + content_type = getattr(content, 'content_type', content_type) + size = getattr(content, 'size') + + content.name = cleaned_name + encoded_name = self._encode_name(name) + file = self.file_class(encoded_name, 'rw', self) + file.blob.upload_from_file(content, content_type=content_type, size=size) + return cleaned_name + + def delete(self, name): + name = self._normalize_name(self._clean_name(name)) + self.bucket.delete_blob(self._encode_name(name)) + + def exists(self, name): + if not name: # root element aka the bucket + try: + self.bucket + return True + except ImproperlyConfigured: + return False + + name = self._normalize_name(self._clean_name(name)) + return bool(self.bucket.get_blob(self._encode_name(name))) + + def listdir(self, name): + name = self._normalize_name(self._clean_name(name)) + # for the bucket.list and logic below name needs to end in / + # But for the root path "" we leave it as an empty string + if name and not name.endswith('/'): + name += '/' + + files_list = list(self.bucket.list_blobs(prefix=self._encode_name(name))) + files = [] + dirs = set() + + base_parts = name.split("/")[:-1] + for item in files_list: + parts = item.name.split("/") + parts = parts[len(base_parts):] + if len(parts) == 1 and parts[0]: + # File + files.append(parts[0]) + elif len(parts) > 1 and parts[0]: + # Directory + dirs.add(parts[0]) + return list(dirs), files + + def size(self, name): + name = self._encode_name(self._normalize_name(self._clean_name(name))) + blob = self.bucket.get_blob(self._encode_name(name)) + return blob.size if blob else 0 + + def modified_time(self, name): + name = self._normalize_name(self._clean_name(name)) + blob = self.bucket.get_blob(self._encode_name(name)) + return blob.updated if blob else None + + def url(self, name): + # Preserve the trailing slash after normalizing the path. + name = self._normalize_name(self._clean_name(name)) + blob = self.bucket.get_blob(self._encode_name(name)) + return blob.public_url if blob else None + + def get_available_name(self, name, max_length=None): + if self.file_overwrite: + name = self._clean_name(name) + return name + return super(GoogleCloudStorage, self).get_available_name(name, max_length) From 85c53b5220a8ac9cf308e8b41f58bb3178044295 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 14 Dec 2016 02:24:47 +0000 Subject: [PATCH 084/174] Rename to gcloud, and import google.cloud The "gcloud" Python module is now called "google.cloud", so use the new name. This means the name of this module needs to change too, since it can no longer be "google". 
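Concretely, imports change along these lines (a sketch mirroring the diff
below; only the module path moves):

    # old
    from gcloud.storage.client import Client
    # new
    from google.cloud.storage.client import Client

and the backend module itself is renamed from storages.backends.google to
storages.backends.gcloud.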
--- storages/backends/{google.py => gcloud.py} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename storages/backends/{google.py => gcloud.py} (97%) diff --git a/storages/backends/google.py b/storages/backends/gcloud.py similarity index 97% rename from storages/backends/google.py rename to storages/backends/gcloud.py index 1ead8c387..336bd34a1 100644 --- a/storages/backends/google.py +++ b/storages/backends/gcloud.py @@ -8,10 +8,10 @@ from storages.utils import setting try: - from gcloud.storage.client import Client - from gcloud.storage.bucket import Bucket - from gcloud.storage.blob import Blob - from gcloud.exceptions import NotFound + from google.cloud.storage.client import Client + from google.cloud.storage.bucket import Bucket + from google.cloud.storage.blob import Blob + from google.cloud.exceptions import NotFound except ImportError: raise ImproperlyConfigured("Could not load Google Storage bindings.\n" "See https://github.com/GoogleCloudPlatform/gcloud-python") From b3ec8b5196da45a05585c6db2974399815daa210 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 14 Dec 2016 02:59:40 +0000 Subject: [PATCH 085/174] Add @deconstructible to GoogleCloudStorage This decorator is required to allow Django 1.7 migrations to serialize instances of this class. Without the decorator, running "makemigrations" on code like the following will fail with a "Cannot Serialize" error. from storages.backends.gcloud import GoogleCloudStorage gcs = GoogleCloudStorage() class SomeModel(models.Models): some_field = models.FileField(storage=gcs) --- storages/backends/gcloud.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 336bd34a1..6d8626f75 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -3,6 +3,7 @@ from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File +from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_text, smart_str from storages.compat import Storage from storages.utils import setting @@ -71,6 +72,7 @@ def close(self): self._file = None +@deconstructible class GoogleCloudStorage(Storage): client_class = Client bucket_class = Bucket From 1ddc32ba10bc17f44aa329d2ce667956e4826a1a Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Thu, 5 Jan 2017 00:00:25 +0000 Subject: [PATCH 086/174] Remove MIME type guessing According to https://googlecloudplatform.github.io/google-cloud-python/stable/storage-blobs.html#google.cloud.storage.blob.Blob.upload_from_file it's not actually necessary to specify a MIME type. 
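The effect on the upload call, sketched with fh standing in for any open
file object (the save path still passes size; only the MIME guessing goes
away):

    # before: guess a MIME type and pass it through explicitly
    blob.upload_from_file(fh, content_type=content_type, size=size)
    # after: the library copes without an explicit content type
    blob.upload_from_file(fh)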
--- storages/backends/gcloud.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 6d8626f75..bf97f8087 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -1,4 +1,3 @@ -import mimetypes from tempfile import SpooledTemporaryFile from django.core.exceptions import ImproperlyConfigured @@ -64,10 +63,7 @@ def close(self): if self._file is not None: if self._is_dirty: self.file.seek(0) - content_type, _ = mimetypes.guess_type(self.name) - content_type = getattr(self.file, 'content_type', content_type) - size = getattr(self.file, 'size') - self.blob.upload_from_file(self.file, content_type=content_type, size=size) + self.blob.upload_from_file(self.file) self._file.close() self._file = None @@ -172,14 +168,12 @@ def _open(self, name, mode='rb'): def _save(self, name, content): cleaned_name = self._clean_name(name) name = self._normalize_name(cleaned_name) - content_type, _ = mimetypes.guess_type(name) - content_type = getattr(content, 'content_type', content_type) size = getattr(content, 'size') content.name = cleaned_name encoded_name = self._encode_name(name) file = self.file_class(encoded_name, 'rw', self) - file.blob.upload_from_file(content, content_type=content_type, size=size) + file.blob.upload_from_file(content, size=size) return cleaned_name def delete(self, name): From 8105cbadb5ff72519d55c05eb13fb9b3fff0a647 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Thu, 5 Jan 2017 00:01:13 +0000 Subject: [PATCH 087/174] Fix name in error message --- storages/backends/gcloud.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index bf97f8087..96afd689c 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -13,7 +13,7 @@ from google.cloud.storage.blob import Blob from google.cloud.exceptions import NotFound except ImportError: - raise ImproperlyConfigured("Could not load Google Storage bindings.\n" + raise ImproperlyConfigured("Could not load Google Cloud Storage bindings.\n" "See https://github.com/GoogleCloudPlatform/gcloud-python") From 7cb67d57bfa9df3a28ca631da057b870a1cf201d Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Thu, 5 Jan 2017 19:43:23 +0000 Subject: [PATCH 088/174] Move clean_name to utils We want to use this in the Google Cloud Storage backend as well. This is a separate commit so we can verify the clean_name tests in s3boto still pass (before I move those as well). --- storages/backends/s3boto.py | 13 ++----------- storages/utils.py | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 843f61b41..77a77de44 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -1,5 +1,4 @@ import os -import posixpath import mimetypes from datetime import datetime from gzip import GzipFile @@ -24,7 +23,7 @@ raise ImproperlyConfigured("Could not load Boto's S3 bindings.\n" "See https://github.com/boto/boto") -from storages.utils import setting +from storages.utils import clean_name, setting boto_version_info = tuple([int(i) for i in boto_version.split('-')[0].split('.')]) @@ -348,15 +347,7 @@ def _clean_name(self, name): """ Cleans the name so that Windows style paths work """ - # Normalize Windows style paths - clean_name = posixpath.normpath(name).replace('\\', '/') - - # os.path.normpath() can strip trailing slashes so we implement - # a workaround here. 
- if name.endswith('/') and not clean_name.endswith('/'): - # Add a trailing slash as it was stripped. - clean_name += '/' - return clean_name + return clean_name(name) def _normalize_name(self, name): """ diff --git a/storages/utils.py b/storages/utils.py index 2f501b194..5e02f44b3 100644 --- a/storages/utils.py +++ b/storages/utils.py @@ -1,3 +1,5 @@ +import posixpath + from django.conf import settings from django.core.exceptions import ImproperlyConfigured @@ -20,3 +22,19 @@ def setting(name, default=None, strict=False): msg = "You must provide settings.%s" % name raise ImproperlyConfigured(msg) return getattr(settings, name, default) + + +def clean_name(name): + """ + Cleans the name so that Windows style paths work + """ + # Normalize Windows style paths + clean_name = posixpath.normpath(name).replace('\\', '/') + + # os.path.normpath() can strip trailing slashes so we implement + # a workaround here. + if name.endswith('/') and not clean_name.endswith('/'): + # Add a trailing slash as it was stripped. + return clean_name + '/' + else: + return clean_name From 159b003fefafcad3cb790bed9db6eac5e910e336 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Thu, 5 Jan 2017 20:11:40 +0000 Subject: [PATCH 089/174] Move clean_name tests to test_utils --- tests/test_s3boto.py | 24 ++---------------------- tests/test_utils.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 22 deletions(-) diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index f2a55476b..217e93705 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -70,32 +70,12 @@ class S3BotoStorageTests(S3BotoTestCase): def test_clean_name(self): """ - Test the base case of _clean_name + Test the base case of _clean_name - more tests are performed in + test_utils """ path = self.storage._clean_name("path/to/somewhere") self.assertEqual(path, "path/to/somewhere") - def test_clean_name_normalize(self): - """ - Test the normalization of _clean_name - """ - path = self.storage._clean_name("path/to/../somewhere") - self.assertEqual(path, "path/somewhere") - - def test_clean_name_trailing_slash(self): - """ - Test the _clean_name when the path has a trailing slash - """ - path = self.storage._clean_name("path/to/somewhere/") - self.assertEqual(path, "path/to/somewhere/") - - def test_clean_name_windows(self): - """ - Test the _clean_name when the path has a trailing slash - """ - path = self.storage._clean_name("path\\to\\somewhere") - self.assertEqual(path, "path/to/somewhere") - def test_storage_url_slashes(self): """ Test URL generation. 
diff --git a/tests/test_utils.py b/tests/test_utils.py index 2e804b25e..b5a682e03 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -14,3 +14,33 @@ def test_setting_unfound(self): self.assertEqual(utils.setting('FOO', 'bar'), 'bar') with self.assertRaises(ImproperlyConfigured): utils.setting('FOO', strict=True) + + +class CleanNameTests(TestCase): + def test_clean_name(self): + """ + Test the base case of clean_name + """ + path = utils.clean_name("path/to/somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_clean_name_normalize(self): + """ + Test the normalization of clean_name + """ + path = utils.clean_name("path/to/../somewhere") + self.assertEqual(path, "path/somewhere") + + def test_clean_name_trailing_slash(self): + """ + Test the clean_name when the path has a trailing slash + """ + path = utils.clean_name("path/to/somewhere/") + self.assertEqual(path, "path/to/somewhere/") + + def test_clean_name_windows(self): + """ + Test the clean_name when the path has a trailing slash + """ + path = utils.clean_name("path\\to\\somewhere") + self.assertEqual(path, "path/to/somewhere") From 621391ac671d3d41fbcfc05dd184e77f4f8bdf81 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Thu, 5 Jan 2017 20:11:53 +0000 Subject: [PATCH 090/174] Use utils.clean_name() --- storages/backends/gcloud.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 96afd689c..2771650f6 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -5,7 +5,7 @@ from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_text, smart_str from storages.compat import Storage -from storages.utils import setting +from storages.utils import clean_name, setting try: from google.cloud.storage.client import Client @@ -141,16 +141,7 @@ def _clean_name(self, name): """ Cleans the name so that Windows style paths work """ - # Normalize Windows style paths - clean_name = name.replace('\\', '/') - - # os.path.normpath() can strip trailing slashes so we implement - # a workaround here. - if name.endswith('/') and not clean_name.endswith('/'): - # Add a trailing slash as it was stripped. - return clean_name + '/' - else: - return clean_name + return clean_name(name) def _encode_name(self, name): return smart_str(name, encoding=self.file_name_charset) From 058972f1cef90f2aa0a73937214d2479597d7b40 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Thu, 5 Jan 2017 20:15:24 +0000 Subject: [PATCH 091/174] Remove _normalize_name This is used by s3boto to support the AWS_LOCATION setting, which isn't needed by Google Cloud Storage --- storages/backends/gcloud.py | 28 ++++++++++------------------ 1 file changed, 10 insertions(+), 18 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 2771650f6..e0afd09c1 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -130,13 +130,6 @@ def _get_or_create_bucket(self, name): "setting GS_AUTO_CREATE_BUCKET to " "``True``." % name) - def _normalize_name(self, name): - """ - No normalizing supported. This can be implemented later. - TODO: Implement normalizing, like the s3boto backend. 
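A sketch of the intended behaviour, using a hypothetical bucket name:

    storage = GoogleCloudStorage(bucket_name='some-bucket')

    storage.open('missing.txt', 'rb')    # no existing blob in read mode:
                                         # no Blob is created, _open() raises IOError
    f = storage.open('fresh.txt', 'wb')  # write mode: a fresh Blob is created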
- """ - return name - def _clean_name(self, name): """ Cleans the name so that Windows style paths work @@ -150,25 +143,24 @@ def _decode_name(self, name): return force_text(name, encoding=self.file_name_charset) def _open(self, name, mode='rb'): - name = self._normalize_name(self._clean_name(name)) + name = self._clean_name(name) file_object = self.file_class(name, mode, self) if not file_object.blob: raise IOError('File does not exist: %s' % name) return file_object def _save(self, name, content): - cleaned_name = self._clean_name(name) - name = self._normalize_name(cleaned_name) + name = self._clean_name(name) size = getattr(content, 'size') - content.name = cleaned_name + content.name = name encoded_name = self._encode_name(name) file = self.file_class(encoded_name, 'rw', self) file.blob.upload_from_file(content, size=size) - return cleaned_name + return name def delete(self, name): - name = self._normalize_name(self._clean_name(name)) + name = self._clean_name(name) self.bucket.delete_blob(self._encode_name(name)) def exists(self, name): @@ -179,11 +171,11 @@ def exists(self, name): except ImproperlyConfigured: return False - name = self._normalize_name(self._clean_name(name)) + name = self._clean_name(name) return bool(self.bucket.get_blob(self._encode_name(name))) def listdir(self, name): - name = self._normalize_name(self._clean_name(name)) + name = self._clean_name(name) # for the bucket.list and logic below name needs to end in / # But for the root path "" we leave it as an empty string if name and not name.endswith('/'): @@ -206,18 +198,18 @@ def listdir(self, name): return list(dirs), files def size(self, name): - name = self._encode_name(self._normalize_name(self._clean_name(name))) + name = self._encode_name(self._clean_name(name)) blob = self.bucket.get_blob(self._encode_name(name)) return blob.size if blob else 0 def modified_time(self, name): - name = self._normalize_name(self._clean_name(name)) + name = self._clean_name(name) blob = self.bucket.get_blob(self._encode_name(name)) return blob.updated if blob else None def url(self, name): # Preserve the trailing slash after normalizing the path. 
- name = self._normalize_name(self._clean_name(name)) + name = self._clean_name(name) blob = self.bucket.get_blob(self._encode_name(name)) return blob.public_url if blob else None From cbe2530c419e7aedefed7ab8fc064123586c3bd8 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Mon, 9 Jan 2017 19:13:26 +0000 Subject: [PATCH 092/174] Remove unused import and class variable --- storages/backends/gcloud.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index e0afd09c1..e994fec5c 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -9,7 +9,6 @@ try: from google.cloud.storage.client import Client - from google.cloud.storage.bucket import Bucket from google.cloud.storage.blob import Blob from google.cloud.exceptions import NotFound except ImportError: @@ -71,7 +70,6 @@ def close(self): @deconstructible class GoogleCloudStorage(Storage): client_class = Client - bucket_class = Bucket file_class = GoogleCloudFile not_found_exception = NotFound From 189233a7bb67297484219465d1fad8196b53625b Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 00:13:13 +0000 Subject: [PATCH 093/174] Move safe_join to utils --- storages/backends/s3boto.py | 36 +---------------------------- storages/utils.py | 45 ++++++++++++++++++++++++++++++++++--- tests/test_s3boto.py | 38 ------------------------------- tests/test_utils.py | 37 ++++++++++++++++++++++++++++++ 4 files changed, 80 insertions(+), 76 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 77a77de44..ff5ed792e 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -10,7 +10,6 @@ from django.utils.deconstruct import deconstructible from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes from django.utils.six import BytesIO -from django.utils.six.moves.urllib import parse as urlparse from django.utils import timezone as tz try: @@ -23,7 +22,7 @@ raise ImproperlyConfigured("Could not load Boto's S3 bindings.\n" "See https://github.com/boto/boto") -from storages.utils import clean_name, setting +from storages.utils import clean_name, safe_join, setting boto_version_info = tuple([int(i) for i in boto_version.split('-')[0].split('.')]) @@ -32,39 +31,6 @@ "higher.\nSee https://github.com/boto/boto") -def safe_join(base, *paths): - """ - A version of django.utils._os.safe_join for S3 paths. - - Joins one or more path components to the base path component - intelligently. Returns a normalized version of the final path. - - The final path must be located inside of the base path component - (otherwise a ValueError is raised). - - Paths outside the base path indicate a possible security - sensitive operation. - """ - base_path = force_text(base) - base_path = base_path.rstrip('/') - paths = [force_text(p) for p in paths] - - final_path = base_path - for path in paths: - final_path = urlparse.urljoin(final_path.rstrip('/') + '/', path) - - # Ensure final_path starts with base_path and that the next character after - # the final path is '/' (or nothing, in which case final_path must be - # equal to base_path). 
- base_path_len = len(base_path) - if (not final_path.startswith(base_path) or - final_path[base_path_len:base_path_len + 1] not in ('', '/')): - raise ValueError('the joined path is located outside of the base path' - ' component') - - return final_path.lstrip('/') - - @deconstructible class S3BotoStorageFile(File): """ diff --git a/storages/utils.py b/storages/utils.py index 5e02f44b3..60967d869 100644 --- a/storages/utils.py +++ b/storages/utils.py @@ -2,6 +2,8 @@ from django.conf import settings from django.core.exceptions import ImproperlyConfigured +from django.utils.encoding import force_text +from django.utils.six.moves.urllib import parse as urlparse def setting(name, default=None, strict=False): @@ -35,6 +37,43 @@ def clean_name(name): # a workaround here. if name.endswith('/') and not clean_name.endswith('/'): # Add a trailing slash as it was stripped. - return clean_name + '/' - else: - return clean_name + clean_name = clean_name + '/' + + # Given an empty string, os.path.normpath() will return ., which we don't want + if clean_name == '.': + clean_name = '' + + return clean_name + + +def safe_join(base, *paths): + """ + A version of django.utils._os.safe_join for S3 paths. + + Joins one or more path components to the base path component + intelligently. Returns a normalized version of the final path. + + The final path must be located inside of the base path component + (otherwise a ValueError is raised). + + Paths outside the base path indicate a possible security + sensitive operation. + """ + base_path = force_text(base) + base_path = base_path.rstrip('/') + paths = [force_text(p) for p in paths] + + final_path = base_path + for path in paths: + final_path = urlparse.urljoin(final_path.rstrip('/') + '/', path) + + # Ensure final_path starts with base_path and that the next character after + # the final path is '/' (or nothing, in which case final_path must be + # equal to base_path). + base_path_len = len(base_path) + if (not final_path.startswith(base_path) or + final_path[base_path_len:base_path_len + 1] not in ('', '/')): + raise ValueError('the joined path is located outside of the base path' + ' component') + + return final_path.lstrip('/') diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index 217e93705..685c2d3aa 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -17,7 +17,6 @@ from storages.backends import s3boto __all__ = ( - 'SafeJoinTest', 'S3BotoStorageTests', ) @@ -29,43 +28,6 @@ def setUp(self, S3Connection): self.storage._connection = mock.MagicMock() -class SafeJoinTest(TestCase): - def test_normal(self): - path = s3boto.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") - self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") - - def test_with_dot(self): - path = s3boto.safe_join("", "path/./somewhere/../other", "..", - ".", "to/./somewhere") - self.assertEqual(path, "path/to/somewhere") - - def test_base_url(self): - path = s3boto.safe_join("base_url", "path/to/somewhere") - self.assertEqual(path, "base_url/path/to/somewhere") - - def test_base_url_with_slash(self): - path = s3boto.safe_join("base_url/", "path/to/somewhere") - self.assertEqual(path, "base_url/path/to/somewhere") - - def test_suspicious_operation(self): - self.assertRaises(ValueError, - s3boto.safe_join, "base", "../../../../../../../etc/passwd") - - def test_trailing_slash(self): - """ - Test safe_join with paths that end with a trailing slash. 
- """ - path = s3boto.safe_join("base_url/", "path/to/somewhere/") - self.assertEqual(path, "base_url/path/to/somewhere/") - - def test_trailing_slash_multi(self): - """ - Test safe_join with multiple paths that end with a trailing slash. - """ - path = s3boto.safe_join("base_url/", "path/to/" "somewhere/") - self.assertEqual(path, "base_url/path/to/somewhere/") - - class S3BotoStorageTests(S3BotoTestCase): def test_clean_name(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index b5a682e03..2a892e283 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -44,3 +44,40 @@ def test_clean_name_windows(self): """ path = utils.clean_name("path\\to\\somewhere") self.assertEqual(path, "path/to/somewhere") + + +class SafeJoinTest(TestCase): + def test_normal(self): + path = utils.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") + self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") + + def test_with_dot(self): + path = utils.safe_join("", "path/./somewhere/../other", "..", + ".", "to/./somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_base_url(self): + path = utils.safe_join("base_url", "path/to/somewhere") + self.assertEqual(path, "base_url/path/to/somewhere") + + def test_base_url_with_slash(self): + path = utils.safe_join("base_url/", "path/to/somewhere") + self.assertEqual(path, "base_url/path/to/somewhere") + + def test_suspicious_operation(self): + self.assertRaises(ValueError, + utils.safe_join, "base", "../../../../../../../etc/passwd") + + def test_trailing_slash(self): + """ + Test safe_join with paths that end with a trailing slash. + """ + path = utils.safe_join("base_url/", "path/to/somewhere/") + self.assertEqual(path, "base_url/path/to/somewhere/") + + def test_trailing_slash_multi(self): + """ + Test safe_join with multiple paths that end with a trailing slash. + """ + path = utils.safe_join("base_url/", "path/to/" "somewhere/") + self.assertEqual(path, "base_url/path/to/somewhere/") From 3829f9a1abf0f3ec0242572d1303a1d9f9c1d40d Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 00:23:28 +0000 Subject: [PATCH 094/174] Add and use _normalize_name() like in s3boto We need this even though we don't have "location" support since it undoes some of the weirdness (e.g. adding './' at the beginning of some paths) done by clean_name(). It would be possible to do this more simply but doing it this way makes things consistent with s3boto. --- storages/backends/gcloud.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index e994fec5c..f5e26f4e0 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -5,7 +5,7 @@ from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_text, smart_str from storages.compat import Storage -from storages.utils import clean_name, setting +from storages.utils import clean_name, safe_join, setting try: from google.cloud.storage.client import Client @@ -134,6 +134,14 @@ def _clean_name(self, name): """ return clean_name(name) + def _normalize_name(self, name): + """ + Normalizes the name so that paths like /path/to/ignored/../something.txt + and ./file.txt work. Note that clean_name adds ./ to some paths so + they need to be fixed here. 
+ """ + return safe_join('', name) + def _encode_name(self, name): return smart_str(name, encoding=self.file_name_charset) @@ -141,24 +149,25 @@ def _decode_name(self, name): return force_text(name, encoding=self.file_name_charset) def _open(self, name, mode='rb'): - name = self._clean_name(name) + name = self._normalize_name(self._clean_name(name)) file_object = self.file_class(name, mode, self) if not file_object.blob: raise IOError('File does not exist: %s' % name) return file_object def _save(self, name, content): - name = self._clean_name(name) + cleaned_name = self._clean_name(name) + name = self._normalize_name(cleaned_name) size = getattr(content, 'size') - content.name = name + content.name = cleaned_name encoded_name = self._encode_name(name) file = self.file_class(encoded_name, 'rw', self) file.blob.upload_from_file(content, size=size) - return name + return cleaned_name def delete(self, name): - name = self._clean_name(name) + name = self._normalize_name(self._clean_name(name)) self.bucket.delete_blob(self._encode_name(name)) def exists(self, name): @@ -169,11 +178,11 @@ def exists(self, name): except ImproperlyConfigured: return False - name = self._clean_name(name) + name = self._normalize_name(self._clean_name(name)) return bool(self.bucket.get_blob(self._encode_name(name))) def listdir(self, name): - name = self._clean_name(name) + name = self._normalize_name(self._clean_name(name)) # for the bucket.list and logic below name needs to end in / # But for the root path "" we leave it as an empty string if name and not name.endswith('/'): @@ -196,18 +205,18 @@ def listdir(self, name): return list(dirs), files def size(self, name): - name = self._encode_name(self._clean_name(name)) + name = self._normalize_name(self._clean_name(name)) blob = self.bucket.get_blob(self._encode_name(name)) return blob.size if blob else 0 def modified_time(self, name): - name = self._clean_name(name) + name = self._normalize_name(self._clean_name(name)) blob = self.bucket.get_blob(self._encode_name(name)) return blob.updated if blob else None def url(self, name): # Preserve the trailing slash after normalizing the path. - name = self._clean_name(name) + name = self._normalize_name(self._clean_name(name)) blob = self.bucket.get_blob(self._encode_name(name)) return blob.public_url if blob else None From e16800e9ccf179bb493adc438582db5b682582cd Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 19:25:29 +0000 Subject: [PATCH 095/174] Remove unused function --- storages/backends/gcloud.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index f5e26f4e0..11ea12b7f 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -145,9 +145,6 @@ def _normalize_name(self, name): def _encode_name(self, name): return smart_str(name, encoding=self.file_name_charset) - def _decode_name(self, name): - return force_text(name, encoding=self.file_name_charset) - def _open(self, name, mode='rb'): name = self._normalize_name(self._clean_name(name)) file_object = self.file_class(name, mode, self) From 0d6151375376c3ff3cac68969a251ab9bdda68e4 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 19:25:38 +0000 Subject: [PATCH 096/174] Only create a Blob in write mode This is needed so we can detect attempts to open nonexistent files in read mode. 
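To illustrate, a minimal sketch of the behaviour this change enables
(the bucket and file names here are made up for the example):

    storage = GoogleCloudStorage(bucket_name='example-bucket')
    storage.open('missing.txt', 'rb')  # raises IOError: no such blob
    storage.open('missing.txt', 'wb')  # still works: a new Blob is created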
--- storages/backends/gcloud.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 11ea12b7f..18133e13a 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -21,7 +21,9 @@ def __init__(self, name, mode, storage, buffer_size=None): self.name = name self._mode = mode self._storage = storage - self.blob = Blob(self.name, storage.bucket) + self.blob = storage.bucket.get_blob(name) + if not self.blob and 'w' in mode: + self.blob = Blob(self.name, storage.bucket) self._file = None self._is_dirty = False From dfe90b5a0aec0d07cc025c231728fb7f0bbf2bd5 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 19:34:22 +0000 Subject: [PATCH 097/174] Add tests of Google Cloud Storage --- tests/test_gcloud.py | 210 +++++++++++++++++++++++++++++++++++++++++++ tox.ini | 1 + 2 files changed, 211 insertions(+) create mode 100644 tests/test_gcloud.py diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py new file mode 100644 index 000000000..9209304ad --- /dev/null +++ b/tests/test_gcloud.py @@ -0,0 +1,210 @@ +try: + from unittest import mock +except ImportError: # Python 3.2 and below + import mock + +import datetime + +from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import ContentFile +from django.test import TestCase + +from google.cloud.storage.blob import Blob + +from storages.backends import gcloud + + +class GCloudTestCase(TestCase): + def setUp(self): + self.bucket_name = 'test_bucket' + self.filename = 'test_file.txt' + + self.storage = gcloud.GoogleCloudStorage(bucket_name=self.bucket_name) + self.storage.client_class = mock.MagicMock + + +class GCloudStorageTests(GCloudTestCase): + + def test_clean_name(self): + """ + Test the base case of _clean_name - more tests are performed in + test_utils + """ + path = self.storage._clean_name("path/to/somewhere") + self.assertEqual(path, "path/to/somewhere") + + def test_open_read(self): + """ + Test opening a file and reading from it + """ + data = 'This is some test read data.' + + f = self.storage.open(self.filename) + self.storage._client.get_bucket.assert_called_with(self.bucket_name) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + f.blob.download_to_file = lambda tmpfile: tmpfile.write(data) + self.assertEqual(f.read(), data) + + def test_open_read_nonexistent(self): + self.storage._bucket = mock.MagicMock() + self.storage._bucket.get_blob.return_value = None + + self.assertRaises(IOError, self.storage.open, self.filename) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + @mock.patch('storages.backends.gcloud.Blob') + def test_open_write(self, MockBlob): + """ + Test opening a file and writing to it + """ + data = 'This is some test write data.' + + # Simulate the file not existing before the write + self.storage._bucket = mock.MagicMock() + self.storage._bucket.get_blob.return_value = None + + f = self.storage.open(self.filename, 'wb') + MockBlob.assert_called_with(self.filename, self.storage._bucket) + + f.write(data) + tmpfile = f._file + # File data is not actually written until close(), so do that. + f.close() + + MockBlob().upload_from_file.assert_called_with(tmpfile) + + def test_save(self): + data = 'This is some test content.' 
+ content = ContentFile(data) + + self.storage.save(self.filename, content) + + self.storage._client.get_bucket.assert_called_with(self.bucket_name) + self.storage._bucket.get_blob().upload_from_file.assert_called_with( + content, size=len(data)) + + def test_delete(self): + self.storage.delete(self.filename) + + self.storage._client.get_bucket.assert_called_with(self.bucket_name) + self.storage._bucket.delete_blob.assert_called_with(self.filename) + + def test_exists(self): + self.storage._bucket = mock.MagicMock() + self.assertTrue(self.storage.exists(self.filename)) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + self.storage._bucket.reset_mock() + self.storage._bucket.get_blob.return_value = None + self.assertFalse(self.storage.exists(self.filename)) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + def test_exists_no_bucket(self): + # exists('') should return False if the bucket doesn't exist + self.storage._client = mock.MagicMock() + self.storage._client.get_bucket.side_effect = self.storage.not_found_exception( + 'dang') + self.assertFalse(self.storage.exists('')) + + def test_exists_bucket(self): + # exists('') should return True if the bucket exists + self.assertTrue(self.storage.exists('')) + + def test_exists_bucket_auto_create(self): + # exists('') should automatically create the bucket if + # auto_create_bucket is configured + self.storage.auto_create_bucket = True + self.storage._client = mock.MagicMock() + self.storage._client.get_bucket.side_effect = self.storage.not_found_exception( + 'dang') + + self.assertTrue(self.storage.exists('')) + self.storage._client.create_bucket.assert_called_with(self.bucket_name) + + def test_listdir(self): + file_names = ["some/path/1.txt", "2.txt", "other/path/3.txt", "4.txt"] + + self.storage._bucket = mock.MagicMock() + self.storage._bucket.list_blobs.return_value = [] + for name in file_names: + blob = mock.MagicMock(spec=Blob) + blob.name = name + self.storage._bucket.list_blobs.return_value.append(blob) + + dirs, files = self.storage.listdir('') + + self.assertEqual(len(dirs), 2) + for directory in ["some", "other"]: + self.assertTrue(directory in dirs, + """ "%s" not in directory list "%s".""" % ( + directory, dirs)) + + self.assertEqual(len(files), 2) + for filename in ["2.txt", "4.txt"]: + self.assertTrue(filename in files, + """ "%s" not in file list "%s".""" % ( + filename, files)) + + def test_listdir_subdir(self): + file_names = ["some/path/1.txt", "some/2.txt"] + + self.storage._bucket = mock.MagicMock() + self.storage._bucket.list_blobs.return_value = [] + for name in file_names: + blob = mock.MagicMock(spec=Blob) + blob.name = name + self.storage._bucket.list_blobs.return_value.append(blob) + + dirs, files = self.storage.listdir('some/') + + self.assertEqual(len(dirs), 1) + self.assertTrue('path' in dirs, + """ "path" not in directory list "%s".""" % (dirs,)) + + self.assertEqual(len(files), 1) + self.assertTrue('2.txt' in files, + """ "2.txt" not in files list "%s".""" % (files,)) + + def test_size(self): + size = 1234 + + self.storage._bucket = mock.MagicMock() + blob = mock.MagicMock() + blob.size = size + self.storage._bucket.get_blob.return_value = blob + + self.assertEqual(self.storage.size(self.filename), size) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + def test_modified_time(self): + date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) + + self.storage._bucket = mock.MagicMock() + blob = mock.MagicMock() + blob.updated = date + 
self.storage._bucket.get_blob.return_value = blob + + self.assertEqual(self.storage.modified_time(self.filename), date) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + def test_url(self): + url = 'https://example.com/mah-bukkit/{}'.format(self.filename) + + self.storage._bucket = mock.MagicMock() + blob = mock.MagicMock() + blob.public_url = url + self.storage._bucket.get_blob.return_value = blob + + self.assertEqual(self.storage.url(self.filename), url) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + def test_get_available_name(self): + self.storage.file_overwrite = True + self.assertEqual(self.storage.get_available_name(self.filename), self.filename) + + self.storage._bucket = mock.MagicMock() + self.storage._bucket.get_blob.return_value = None + self.storage.file_overwrite = False + self.assertEqual(self.storage.get_available_name(self.filename), self.filename) + self.storage._bucket.get_blob.assert_called_with(self.filename) diff --git a/tox.ini b/tox.ini index eaa765377..24fa21375 100644 --- a/tox.ini +++ b/tox.ini @@ -22,3 +22,4 @@ deps = boto3>=1.2.3 dropbox>=3.24 paramiko + google-cloud-storage>=0.22.0 From 9ad54cb66fe70852c3de7cdaa16ec302b59bcd94 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 21:18:44 +0000 Subject: [PATCH 098/174] Add documentation for Google Cloud Storage --- docs/backends/gcloud.rst | 190 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 190 insertions(+) create mode 100644 docs/backends/gcloud.rst diff --git a/docs/backends/gcloud.rst b/docs/backends/gcloud.rst new file mode 100644 index 000000000..21f8e51e7 --- /dev/null +++ b/docs/backends/gcloud.rst @@ -0,0 +1,190 @@ +Google Cloud Storage +==================== + +Usage +***** + +This backend provides support for Google Cloud Storage using the +library provided by Google. + +It's possible to access Google Cloud Storage in S3 compatibility mode +using other libraries in django-storages, but this is the only library +offering native support. + +By default this library will use the credentials associated with the +current instance for authentication. To override this, see the +settings below. + + +Settings +-------- + +To use gcloud set:: + + DEFAULT_FILE_STORAGE = 'storages.backends.gcloud.GoogleCloudStorage' + +``GS_BUCKET_NAME`` + +Your Google Storage bucket name, as a string. + +``GS_PROJECT_ID`` (optional) + +Your Google Cloud project ID. If unset, falls back to the default +inferred from the environment. + +``GS_CREDENTIALS`` (optional) + +The OAuth 2 credentials to use for the connection. If unset, falls +back to the default inferred from the environment. + +``GS_AUTO_CREATE_BUCKET`` (optional, default is ``False``) + +If True, attempt to create the bucket if it does not exist. + +``GS_DEFAULT_ACL`` (optional) + +If set to ``private`` changes uploaded file's Access Control List from the default permission ``public-read`` to give owner full control and remove read access from everyone else. + +``GS_BUCKET_ACL`` (optional) + +ACL used when creating a new bucket; defaults to ``GS_DEFAULT_ACL``. + +``GS_FILE_CHARSET`` (optional) + +Allows overriding the character set used in filenames. + +``GS_FILE_OVERWRITE`` (optional: default is ``True``) + +By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. + +``GS_MAX_MEMORY_SIZE`` (optional) + +The maximum amount of memory a returned file can take up before being +rolled over into a temporary file on disk. 
Default is 0: Do not roll over. + +Fields +------ + +Once you're done, default_storage will be Google Cloud Storage:: + + >>> from django.core.files.storage import default_storage + >>> print default_storage.__class__ + + +This way, if you define a new FileField, it will use the Google Cloud Storage:: + + >>> from django.db import models + >>> class Resume(models.Model): + ... pdf = models.FileField(upload_to='pdfs') + ... photos = models.ImageField(upload_to='photos') + ... + >>> resume = Resume() + >>> print resume.pdf.storage + + +Storage +------- + +Standard file access options are available, and work as expected:: + + >>> default_storage.exists('storage_test') + False + >>> file = default_storage.open('storage_test', 'w') + >>> file.write('storage contents') + >>> file.close() + + >>> default_storage.exists('storage_test') + True + >>> file = default_storage.open('storage_test', 'r') + >>> file.read() + 'storage contents' + >>> file.close() + + >>> default_storage.delete('storage_test') + >>> default_storage.exists('storage_test') + False + +Model +----- + +An object without a file has limited functionality:: + + >>> obj1 = MyStorage() + >>> obj1.normal + + >>> obj1.normal.size + Traceback (most recent call last): + ... + ValueError: The 'normal' attribute has no file associated with it. + +Saving a file enables full functionality:: + + >>> obj1.normal.save('django_test.txt', ContentFile('content')) + >>> obj1.normal + + >>> obj1.normal.size + 7 + >>> obj1.normal.read() + 'content' + +Files can be read in a little at a time, if necessary:: + + >>> obj1.normal.open() + >>> obj1.normal.read(3) + 'con' + >>> obj1.normal.read() + 'tent' + >>> '-'.join(obj1.normal.chunks(chunk_size=2)) + 'co-nt-en-t' + +Save another file with the same name:: + + >>> obj2 = MyStorage() + >>> obj2.normal.save('django_test.txt', ContentFile('more content')) + >>> obj2.normal + + >>> obj2.normal.size + 12 + +Push the objects into the cache to make sure they pickle properly:: + + >>> cache.set('obj1', obj1) + >>> cache.set('obj2', obj2) + >>> cache.get('obj2').normal + + +Deleting an object deletes the file it uses, if there are no other objects still using that file:: + + >>> obj2.delete() + >>> obj2.normal.save('django_test.txt', ContentFile('more content')) + >>> obj2.normal + + +Default values allow an object to access a single file:: + + >>> obj3 = MyStorage.objects.create() + >>> obj3.default + + >>> obj3.default.read() + 'default content' + +But it shouldn't be deleted, even if there are no more objects using it:: + + >>> obj3.delete() + >>> obj3 = MyStorage() + >>> obj3.default.read() + 'default content' + +Verify the fix for #5655, making sure the directory is only determined once:: + + >>> obj4 = MyStorage() + >>> obj4.random.save('random_file', ContentFile('random content')) + >>> obj4.random + + +Clean up the temporary files:: + + >>> obj1.normal.delete() + >>> obj2.normal.delete() + >>> obj3.default.delete() + >>> obj4.random.delete() From b37a9cb7ba7d3d35c90221b0d7a18cfd729510f1 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 21:32:01 +0000 Subject: [PATCH 099/174] Import Storage directly This fixes test failures caused by compat.py being removed as a result of dropping Django 1.7 support. 
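Concretely, the fix replaces the import of the removed shim with the
direct Django import:

    # before: relied on the compatibility shim removed with Django 1.7 support
    from storages.compat import Storage

    # after
    from django.core.files.storage import Storage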
--- storages/backends/gcloud.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 18133e13a..cdeef0424 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -2,9 +2,9 @@ from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File +from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_text, smart_str -from storages.compat import Storage from storages.utils import clean_name, safe_join, setting try: From 74f38416c42ea2c6a60601caba27b4766a35d3bd Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 11 Jan 2017 22:40:41 +0000 Subject: [PATCH 100/174] Use byte string for test read Fixes tests in Python 3.x --- tests/test_gcloud.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index 9209304ad..54f8ff76d 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -37,7 +37,7 @@ def test_open_read(self): """ Test opening a file and reading from it """ - data = 'This is some test read data.' + data = b'This is some test read data.' f = self.storage.open(self.filename) self.storage._client.get_bucket.assert_called_with(self.bucket_name) From e8fc9fbdbee4e28d17866c135a891ba2c0781ca6 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Mon, 6 Feb 2017 16:04:22 +0000 Subject: [PATCH 101/174] Add Google Cloud Storage authors --- AUTHORS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index 0dc804966..86722f056 100644 --- a/AUTHORS +++ b/AUTHORS @@ -27,6 +27,8 @@ By order of apparition, thanks: * Michael Barrientos (S3 with Boto3) * piglei (patches) * Matt Braymer-Hayes (S3 with Boto3) + * Eirik Martiniussen Sylliaas (Google Cloud Storage native support) + * Jody McIntyre (Google Cloud Storage native support) Extra thanks to Marty for adding this in Django, you can buy his very interesting book (Pro Django). From 737ac5767568370b57766e7add29ba7b162f4e6b Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Tue, 11 Apr 2017 20:10:07 +0000 Subject: [PATCH 102/174] Address review comments --- storages/backends/gcloud.py | 61 ++++++++++++++++++------------------- tests/test_gcloud.py | 40 ++++++++++++++++-------- 2 files changed, 56 insertions(+), 45 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index cdeef0424..f79fcb5b8 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -71,11 +71,6 @@ def close(self): @deconstructible class GoogleCloudStorage(Storage): - client_class = Client - file_class = GoogleCloudFile - - not_found_exception = NotFound - project_id = setting('GS_PROJECT_ID', None) credentials = setting('GS_CREDENTIALS', None) bucket_name = setting('GS_BUCKET_NAME', None) @@ -101,7 +96,7 @@ def __init__(self, **settings): @property def client(self): if self._client is None: - self._client = self.client_class( + self._client = Client( project=self.project_id, credentials=self.credentials ) @@ -119,7 +114,7 @@ def _get_or_create_bucket(self, name): """ try: return self.client.get_bucket(name) - except self.not_found_exception: + except NotFound: if self.auto_create_bucket: bucket = self.client.create_bucket(name) bucket.acl.all().grant(self.bucket_acl) @@ -130,12 +125,6 @@ def _get_or_create_bucket(self, name): "setting GS_AUTO_CREATE_BUCKET to " "``True``." 
% name) - def _clean_name(self, name): - """ - Cleans the name so that Windows style paths work - """ - return clean_name(name) - def _normalize_name(self, name): """ Normalizes the name so that paths like /path/to/ignored/../something.txt @@ -148,25 +137,24 @@ def _encode_name(self, name): return smart_str(name, encoding=self.file_name_charset) def _open(self, name, mode='rb'): - name = self._normalize_name(self._clean_name(name)) - file_object = self.file_class(name, mode, self) + name = self._normalize_name(clean_name(name)) + file_object = GoogleCloudFile(name, mode, self) if not file_object.blob: raise IOError('File does not exist: %s' % name) return file_object def _save(self, name, content): - cleaned_name = self._clean_name(name) + cleaned_name = clean_name(name) name = self._normalize_name(cleaned_name) - size = getattr(content, 'size') content.name = cleaned_name encoded_name = self._encode_name(name) - file = self.file_class(encoded_name, 'rw', self) - file.blob.upload_from_file(content, size=size) + file = GoogleCloudFile(encoded_name, 'rw', self) + file.blob.upload_from_file(content, size=content.size) return cleaned_name def delete(self, name): - name = self._normalize_name(self._clean_name(name)) + name = self._normalize_name(clean_name(name)) self.bucket.delete_blob(self._encode_name(name)) def exists(self, name): @@ -177,11 +165,11 @@ def exists(self, name): except ImproperlyConfigured: return False - name = self._normalize_name(self._clean_name(name)) + name = self._normalize_name(clean_name(name)) return bool(self.bucket.get_blob(self._encode_name(name))) def listdir(self, name): - name = self._normalize_name(self._clean_name(name)) + name = self._normalize_name(clean_name(name)) # for the bucket.list and logic below name needs to end in / # But for the root path "" we leave it as an empty string if name and not name.endswith('/'): @@ -203,24 +191,33 @@ def listdir(self, name): dirs.add(parts[0]) return list(dirs), files + def _get_blob(self, name): + # Wrap google.cloud.storage's blob to raise if the file doesn't exist + blob = self.bucket.get_blob(name) + + if blob is None: + raise NotFound('File does not exist') + + return blob + def size(self, name): - name = self._normalize_name(self._clean_name(name)) - blob = self.bucket.get_blob(self._encode_name(name)) - return blob.size if blob else 0 + name = self._normalize_name(clean_name(name)) + blob = self._get_blob(self._encode_name(name)) + return blob.size def modified_time(self, name): - name = self._normalize_name(self._clean_name(name)) - blob = self.bucket.get_blob(self._encode_name(name)) - return blob.updated if blob else None + name = self._normalize_name(clean_name(name)) + blob = self._get_blob(self._encode_name(name)) + return blob.updated def url(self, name): # Preserve the trailing slash after normalizing the path. 
- name = self._normalize_name(self._clean_name(name)) - blob = self.bucket.get_blob(self._encode_name(name)) - return blob.public_url if blob else None + name = self._normalize_name(clean_name(name)) + blob = self._get_blob(self._encode_name(name)) + return blob.public_url def get_available_name(self, name, max_length=None): if self.file_overwrite: - name = self._clean_name(name) + name = clean_name(name) return name return super(GoogleCloudStorage, self).get_available_name(name, max_length) diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index 54f8ff76d..9c29d693c 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -9,6 +9,7 @@ from django.core.files.base import ContentFile from django.test import TestCase +from google.cloud.exceptions import NotFound from google.cloud.storage.blob import Blob from storages.backends import gcloud @@ -20,18 +21,15 @@ def setUp(self): self.filename = 'test_file.txt' self.storage = gcloud.GoogleCloudStorage(bucket_name=self.bucket_name) - self.storage.client_class = mock.MagicMock + self.client_patcher = mock.patch('storages.backends.gcloud.Client') + self.client_patcher.start() -class GCloudStorageTests(GCloudTestCase): + def tearDown(self): + self.client_patcher.stop() - def test_clean_name(self): - """ - Test the base case of _clean_name - more tests are performed in - test_utils - """ - path = self.storage._clean_name("path/to/somewhere") - self.assertEqual(path, "path/to/somewhere") + +class GCloudStorageTests(GCloudTestCase): def test_open_read(self): """ @@ -103,8 +101,7 @@ def test_exists(self): def test_exists_no_bucket(self): # exists('') should return False if the bucket doesn't exist self.storage._client = mock.MagicMock() - self.storage._client.get_bucket.side_effect = self.storage.not_found_exception( - 'dang') + self.storage._client.get_bucket.side_effect = NotFound('dang') self.assertFalse(self.storage.exists('')) def test_exists_bucket(self): @@ -116,8 +113,7 @@ def test_exists_bucket_auto_create(self): # auto_create_bucket is configured self.storage.auto_create_bucket = True self.storage._client = mock.MagicMock() - self.storage._client.get_bucket.side_effect = self.storage.not_found_exception( - 'dang') + self.storage._client.get_bucket.side_effect = NotFound('dang') self.assertTrue(self.storage.exists('')) self.storage._client.create_bucket.assert_called_with(self.bucket_name) @@ -177,6 +173,12 @@ def test_size(self): self.assertEqual(self.storage.size(self.filename), size) self.storage._bucket.get_blob.assert_called_with(self.filename) + def test_size_no_file(self): + self.storage._bucket = mock.MagicMock() + self.storage._bucket.get_blob.return_value = None + + self.assertRaises(NotFound, self.storage.size, self.filename) + def test_modified_time(self): date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) @@ -188,6 +190,12 @@ def test_modified_time(self): self.assertEqual(self.storage.modified_time(self.filename), date) self.storage._bucket.get_blob.assert_called_with(self.filename) + def test_modified_time_no_file(self): + self.storage._bucket = mock.MagicMock() + self.storage._bucket.get_blob.return_value = None + + self.assertRaises(NotFound, self.storage.modified_time, self.filename) + def test_url(self): url = 'https://example.com/mah-bukkit/{}'.format(self.filename) @@ -199,6 +207,12 @@ def test_url(self): self.assertEqual(self.storage.url(self.filename), url) self.storage._bucket.get_blob.assert_called_with(self.filename) + def test_url_no_file(self): + self.storage._bucket = mock.MagicMock() + 
self.storage._bucket.get_blob.return_value = None + + self.assertRaises(NotFound, self.storage.url, self.filename) + def test_get_available_name(self): self.storage.file_overwrite = True self.assertEqual(self.storage.get_available_name(self.filename), self.filename) From 44f39cd4f586baaef8bde44c47c7b2b848e22e63 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Tue, 11 Apr 2017 21:47:52 +0000 Subject: [PATCH 103/174] Fix modified_time; add get_modified_time `modified_time` always returns a naive datetime. `get_modified_time`'s return depends on the USE_TZ setting. --- storages/backends/gcloud.py | 9 ++++++++- tests/test_gcloud.py | 35 +++++++++++++++++++++++++++++++---- 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index f79fcb5b8..2eac519f0 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -5,6 +5,7 @@ from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_text, smart_str +from django.utils import timezone from storages.utils import clean_name, safe_join, setting try: @@ -208,7 +209,13 @@ def size(self, name): def modified_time(self, name): name = self._normalize_name(clean_name(name)) blob = self._get_blob(self._encode_name(name)) - return blob.updated + return timezone.make_naive(blob.updated) + + def get_modified_time(self, name): + name = self._normalize_name(clean_name(name)) + blob = self._get_blob(self._encode_name(name)) + updated = blob.updated + return updated if setting('USE_TZ') else timezone.make_naive(updated) def url(self, name): # Preserve the trailing slash after normalizing the path. diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index 9c29d693c..73063af94 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -7,6 +7,7 @@ from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile +from django.utils import timezone from django.test import TestCase from google.cloud.exceptions import NotFound @@ -180,15 +181,41 @@ def test_size_no_file(self): self.assertRaises(NotFound, self.storage.size, self.filename) def test_modified_time(self): - date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) + naive_date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) + aware_date = timezone.make_aware(naive_date, timezone.utc) self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() - blob.updated = date + blob.updated = aware_date self.storage._bucket.get_blob.return_value = blob - self.assertEqual(self.storage.modified_time(self.filename), date) - self.storage._bucket.get_blob.assert_called_with(self.filename) + with self.settings(TIME_ZONE='UTC'): + mt = self.storage.modified_time(self.filename) + self.assertTrue(timezone.is_naive(mt)) + self.assertEqual(mt, naive_date) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + def test_get_modified_time(self): + naive_date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) + aware_date = timezone.make_aware(naive_date, timezone.utc) + + self.storage._bucket = mock.MagicMock() + blob = mock.MagicMock() + blob.updated = aware_date + self.storage._bucket.get_blob.return_value = blob + + with self.settings(TIME_ZONE='America/Montreal', USE_TZ=False): + mt = self.storage.get_modified_time(self.filename) + self.assertTrue(timezone.is_naive(mt)) + naive_date_montreal = timezone.make_naive(aware_date) + self.assertEqual(mt, naive_date_montreal) + 
            self.storage._bucket.get_blob.assert_called_with(self.filename)
+
+        with self.settings(TIME_ZONE='America/Montreal', USE_TZ=True):
+            mt = self.storage.get_modified_time(self.filename)
+            self.assertTrue(timezone.is_aware(mt))
+            self.assertEqual(mt, aware_date)
+            self.storage._bucket.get_blob.assert_called_with(self.filename)
 
     def test_modified_time_no_file(self):
         self.storage._bucket = mock.MagicMock()

From bb9307f1f40c0fea57d3c416b3d54c2b7284ca94 Mon Sep 17 00:00:00 2001
From: Jody McIntyre 
Date: Tue, 11 Apr 2017 22:45:15 +0000
Subject: [PATCH 104/174] Test and fix unicode handling

---
 storages/backends/gcloud.py |  2 +-
 tests/test_gcloud.py        | 25 +++++++++++++++++++++++++
 2 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py
index 2eac519f0..0ee149aed 100644
--- a/storages/backends/gcloud.py
+++ b/storages/backends/gcloud.py
@@ -141,7 +141,7 @@ def _open(self, name, mode='rb'):
         name = self._normalize_name(clean_name(name))
         file_object = GoogleCloudFile(name, mode, self)
         if not file_object.blob:
-            raise IOError('File does not exist: %s' % name)
+            raise IOError(u'File does not exist: %s' % name)
         return file_object

diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py
index 73063af94..ce79ccc51 100644
--- a/tests/test_gcloud.py
+++ b/tests/test_gcloud.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 try:
     from unittest import mock
 except ImportError:  # Python 3.2 and below
@@ -52,6 +54,14 @@ def test_open_read_nonexistent(self):
         self.assertRaises(IOError, self.storage.open, self.filename)
         self.storage._bucket.get_blob.assert_called_with(self.filename)
 
+    def test_open_read_nonexistent_unicode(self):
+        filename = 'ủⓝï℅ⅆℇ.txt'
+
+        self.storage._bucket = mock.MagicMock()
+        self.storage._bucket.get_blob.return_value = None
+
+        self.assertRaises(IOError, self.storage.open, filename)
+
     @mock.patch('storages.backends.gcloud.Blob')
     def test_open_write(self, MockBlob):
         """
@@ -83,6 +93,17 @@ def test_save(self):
         self.storage._client.get_bucket.assert_called_with(self.bucket_name)
         self.storage._bucket.get_blob().upload_from_file.assert_called_with(
             content, size=len(data))
 
+    def test_save2(self):
+        data = 'This is some test ủⓝï℅ⅆℇ content.'
+ filename = 'ủⓝï℅ⅆℇ.txt' + content = ContentFile(data) + + self.storage.save(filename, content) + + self.storage._client.get_bucket.assert_called_with(self.bucket_name) + self.storage._bucket.get_blob().upload_from_file.assert_called_with( + content, size=len(data)) + def test_delete(self): self.storage.delete(self.filename) @@ -249,3 +270,7 @@ def test_get_available_name(self): self.storage.file_overwrite = False self.assertEqual(self.storage.get_available_name(self.filename), self.filename) self.storage._bucket.get_blob.assert_called_with(self.filename) + + def test_get_available_name_unicode(self): + filename = 'ủⓝï℅ⅆℇ.txt' + self.assertEqual(self.storage.get_available_name(filename), filename) From acbe31dad756a719888b7d4554397fcae7349193 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 12 Apr 2017 00:11:26 +0000 Subject: [PATCH 105/174] Address further review comments * Remove unused argument * Return filename when file does not exist --- storages/backends/gcloud.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 0ee149aed..1119f28db 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -18,7 +18,7 @@ class GoogleCloudFile(File): - def __init__(self, name, mode, storage, buffer_size=None): + def __init__(self, name, mode, storage): self.name = name self._mode = mode self._storage = storage @@ -197,7 +197,7 @@ def _get_blob(self, name): blob = self.bucket.get_blob(name) if blob is None: - raise NotFound('File does not exist') + raise NotFound(u'File does not exist: {}'.format(name)) return blob From 02e5829fb98d010bd62c33db2030a05b917b7c70 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 12 Apr 2017 00:11:50 +0000 Subject: [PATCH 106/174] Remove *args and **kwargs This required handling the num_bytes argument explicitly, so that's now tested. --- storages/backends/gcloud.py | 11 +++++++---- tests/test_gcloud.py | 11 +++++++++++ 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 1119f28db..f9aa95ad5 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -50,16 +50,19 @@ def _set_file(self, value): file = property(_get_file, _set_file) - def read(self, *args, **kwargs): + def read(self, num_bytes=None): if 'r' not in self._mode: raise AttributeError("File was not opened in read mode.") - return super(GoogleCloudFile, self).read(*args, **kwargs) + if num_bytes is not None: + return super(GoogleCloudFile, self).read(num_bytes) + else: + return super(GoogleCloudFile, self).read() - def write(self, content, *args, **kwargs): + def write(self, content): if 'w' not in self._mode: raise AttributeError("File was not opened in write mode.") self._is_dirty = True - return super(GoogleCloudFile, self).write(force_bytes(content), *args, **kwargs) + return super(GoogleCloudFile, self).write(force_bytes(content)) def close(self): if self._file is not None: diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index ce79ccc51..2717355a7 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -47,6 +47,17 @@ def test_open_read(self): f.blob.download_to_file = lambda tmpfile: tmpfile.write(data) self.assertEqual(f.read(), data) + def test_open_read_num_bytes(self): + data = b'This is some test read data.' 
+ num_bytes = 10 + + f = self.storage.open(self.filename) + self.storage._client.get_bucket.assert_called_with(self.bucket_name) + self.storage._bucket.get_blob.assert_called_with(self.filename) + + f.blob.download_to_file = lambda tmpfile: tmpfile.write(data) + self.assertEqual(f.read(num_bytes), data[0:num_bytes]) + def test_open_read_nonexistent(self): self.storage._bucket = mock.MagicMock() self.storage._bucket.get_blob.return_value = None From 1d2e206aca20052fa00bc5332b34c95b8821fd46 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Wed, 12 Apr 2017 00:18:25 +0000 Subject: [PATCH 107/174] Add deprecation notice to 'gs' backend --- storages/backends/gs.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/storages/backends/gs.py b/storages/backends/gs.py index 1425ebe2e..638eeda16 100644 --- a/storages/backends/gs.py +++ b/storages/backends/gs.py @@ -1,3 +1,13 @@ +# DEPRECATION NOTICE: This backend is deprecated in favour of the +# "gcloud" backend. This backend uses Google Cloud Storage's XML +# Interoperable API which uses keyed-hash message authentication code +# (a.k.a. developer keys) that are linked to your Google account. The +# interoperable API is really meant for migration to Google Cloud +# Storage. The biggest problem with the developer keys is security and +# privacy. Developer keys should not be shared with anyone as they can +# be used to gain access to other Google Cloud Storage buckets linked +# to your Google account. + from django.core.exceptions import ImproperlyConfigured from django.utils.deconstruct import deconstructible from django.utils.six import BytesIO From 6797d93eb550651ecfb5614c0b899e6125b1c095 Mon Sep 17 00:00:00 2001 From: Simon Lightfoot Date: Wed, 12 Apr 2017 17:13:11 +0100 Subject: [PATCH 108/174] Added project keyword to support GCS. See https://libcloud.readthedocs.io/en/latest/compute/drivers/gce.html (#269) --- storages/backends/apache_libcloud.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/storages/backends/apache_libcloud.py b/storages/backends/apache_libcloud.py index a2a5390de..776b8ac45 100644 --- a/storages/backends/apache_libcloud.py +++ b/storages/backends/apache_libcloud.py @@ -33,6 +33,9 @@ def __init__(self, provider_name=None, option=None): extra_kwargs = {} if 'region' in self.provider: extra_kwargs['region'] = self.provider['region'] + # Used by the GoogleStorageDriver + if 'project' in self.provider: + extra_kwargs['project'] = self.provider['project'] try: provider_type = self.provider['type'] if isinstance(provider_type, string_types): From 11c1089bbc362cb15b229a98bbe197b6b721147e Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 11 Apr 2017 12:16:21 -0400 Subject: [PATCH 109/174] Update CHANGELOG style and for #269 --- CHANGELOG.rst | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a4615da7b..21cfa127e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,13 +4,14 @@ django-storages change log 1.6.0 (XXXX-XX-XX) ****************** -* *Breaking: Remove backends deprecated in v1.5.1* (`#280`_) +* **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_) * Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) * Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment to ``S3Boto3Storage`` and ``S3BotoStorage``. 
(`#283`_ thanks @bxm156) * Fix Boto3 non-ascii filenames on Python 2.7 (`#216`_, `#217`_ thanks @AGASS007) * Fix ``collectstatic`` timezone handling in and add ``get_modified_time`` to ``S3BotoStorage`` (`#290`_) * Add support for Django 1.11 (`#295`_ thanks @jdufresne) +* Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_ thanks @slightfoot) .. _#217: https://github.com/jschneier/django-storages/pull/217 .. _#216: https://github.com/jschneier/django-storages/issues/216 @@ -20,6 +21,7 @@ django-storages change log .. _#258: https://github.com/jschneier/django-storages/pull/258 .. _#290: https://github.com/jschneier/django-storages/pull/290 .. _#295: https://github.com/jschneier/django-storages/pull/295 +.. _#269: https://github.com/jschneier/django-storages/pull/269 1.5.2 (2017-01-13) ****************** @@ -40,8 +42,8 @@ django-storages change log 1.5.1 (2016-09-13) ****************** -* *Breaking: Drop support for Django 1.7* (`#185`_) -* *Breaking: Deprecate hashpath, image, overwrite, mogile, symlinkorcopy, database, mogile, couchdb.* +* **Breaking:** Drop support for Django 1.7 (`#185`_) +* **Deprecation:** hashpath, image, overwrite, mogile, symlinkorcopy, database, mogile, couchdb. See (`issue #202`_) to discuss maintenance going forward * Use a fixed ``mtime`` argument for ``GzipFile`` in ``S3BotoStorage`` and ``S3Boto3Storage`` to ensure a stable output for gzipped files @@ -132,9 +134,9 @@ django-storages change log 1.3 (2015-08-14) **************** -* *Breaking: Drop Support for Django 1.5 and Python 2.6* -* Remove previously deprecated mongodb backend -* Remove previously deprecated ``parse_ts_extended`` from s3boto storage +* **Breaking:** Drop Support for Django 1.5 and Python 2.6 +* **Breaking:** Remove previously deprecated mongodb backend +* **Breaking:** Remove previously deprecated ``parse_ts_extended`` from s3boto storage * Add support for Django 1.8+ (`#36`__) * Add ``AWS_S3_PROXY_HOST`` and ``AWS_S3_PROXY_PORT`` settings for s3boto backend (`#41`_) * Fix Python3K compat issue in apache_libcloud (`#52`_) @@ -176,9 +178,9 @@ django-storages change log 1.2.1 (2014-12-31) ****************** +* **Deprecation:** Issue warning about ``parse_ts_extended`` +* **Deprecation:** mongodb backend - django-mongodb-engine now ships its own storage backend * Fix ``storage.modified_time`` crashing on new files when ``AWS_PRELOAD_METADATA=True`` (`#11`_, `#12`__, `#14`_) -* Deprecate and issue warning about ``parse_ts_extended`` -* Deprecate mongodb backend - django-mongodb-engine now ships its own storage backend .. _#11: https://github.com/jschneier/django-storages/pull/11 __ https://github.com/jschneier/django-storages/issues/12 @@ -188,11 +190,11 @@ __ https://github.com/jschneier/django-storages/issues/12 1.2 (2014-12-14) **************** +* **Breaking:** Remove legacy S3 storage (`#1`_) +* **Breaking:** Remove mosso files backend (`#2`_) * Add text/javascript mimetype to S3BotoStorage gzip allowed defaults * Add support for Django 1.7 migrations in S3BotoStorage and ApacheLibCloudStorage (`#5`_, `#8`_) * Python3K (3.3+) now available for S3Boto backend (`#4`_) -* Remove legacy S3 storage (`#1`_) -* Remove mosso files backend (`#2`_) .. _#8: https://github.com/jschneier/django-storages/pull/8 .. 
_#5: https://github.com/jschneier/django-storages/pull/5

From 699233a28418c8367e56267c227d7c8029a7a7e4 Mon Sep 17 00:00:00 2001
From: Josh Schneier 
Date: Tue, 11 Apr 2017 12:35:52 -0400
Subject: [PATCH 110/174] Remove Mercurial ignore

---
 .hgignore | 21 ---------------------
 1 file changed, 21 deletions(-)
 delete mode 100644 .hgignore

diff --git a/.hgignore b/.hgignore
deleted file mode 100644
index 1a2d13161..000000000
--- a/.hgignore
+++ /dev/null
@@ -1,21 +0,0 @@
-syntax:glob
-
-*.DS_Store
-*.egg
-*.egg-info
-*.elc
-*.gz
-*.log
-*.orig
-*.pyc
-*.swp
-*.tmp
-*~
-.tox/
-_build/
-build/
-dist/*
-django
-local_settings.py
-setuptools*
-testdb.sqlite

From 9ba4880db981acf5ea59712947f96e80f7258afd Mon Sep 17 00:00:00 2001
From: Josh Schneier 
Date: Wed, 12 Apr 2017 16:17:21 -0400
Subject: [PATCH 111/174] Fixes #297 Return None from S3BotoStorage._get_key
 when AWS_PRELOAD_METADATA is True and the name is not in entries

---
 storages/backends/s3boto.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py
index 843f61b41..1bd96573a 100644
--- a/storages/backends/s3boto.py
+++ b/storages/backends/s3boto.py
@@ -444,7 +444,7 @@ def _save_content(self, key, content, headers):
     def _get_key(self, name):
         name = self._normalize_name(self._clean_name(name))
         if self.entries:
-            return self.entries[name]
+            return self.entries.get(name)
         return self.bucket.get_key(self._encode_name(name))
 
     def delete(self, name):

From 7d1e5e0f74cd8b4a40595b6b6b5adb48f7ba071b Mon Sep 17 00:00:00 2001
From: ldng 
Date: Thu, 13 Apr 2017 20:51:33 +0200
Subject: [PATCH 112/174] Adds support for already gzipped content to S3
 storage in the boto3 backend #263 (#264)

* Adds support for already gzipped content to S3 storage in the boto3 backend #263

* Fix parameter typo

* Fix test
---
 storages/backends/s3boto3.py |  6 +++++-
 tests/test_s3boto3.py        | 17 +++++++++++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py
index 2e5241734..c0bbf7419 100644
--- a/storages/backends/s3boto3.py
+++ b/storages/backends/s3boto3.py
@@ -442,8 +442,9 @@ def _save(self, name, content):
         cleaned_name = self._clean_name(name)
         name = self._normalize_name(cleaned_name)
         parameters = self.object_parameters.copy()
+        _type, encoding = mimetypes.guess_type(name)
         content_type = getattr(content, 'content_type',
-            mimetypes.guess_type(name)[0] or self.default_content_type)
+            _type or self.default_content_type)
 
         # setting the content_type in the key object is not enough.
parameters.update({'ContentType': content_type}) @@ -451,6 +452,9 @@ def _save(self, name, content): if self.gzip and content_type in self.gzip_content_types: content = self._compress_content(content) parameters.update({'ContentEncoding': 'gzip'}) + elif encoding: + # If the content already has a particular encoding, set it + parameters.update({'ContentEncoding': encoding}) encoded_name = self._encode_name(name) obj = self.bucket.Object(encoded_name) diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 69b1f9fbb..9bb81d4b0 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -129,6 +129,23 @@ def test_storage_save(self): } ) + def test_storage_save_gzipped(self): + """ + Test saving a gzipped file + """ + name = 'test_storage_save.gz' + content = ContentFile("I am gzip'd") + self.storage.save(name, content) + obj = self.storage.bucket.Object.return_value + obj.upload_fileobj.assert_called_with( + content, + ExtraArgs={ + 'ContentType': 'application/octet-stream', + 'ContentEncoding': 'gzip', + 'ACL': self.storage.default_acl, + } + ) + def test_storage_save_gzip(self): """ Test saving a file with gzip enabled. From f28a6371027bcfd6fef63cf737ffbdf3e47f47b9 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 13 Apr 2017 14:55:51 -0400 Subject: [PATCH 113/174] Update CHANGELOG for #263 & #264 --- CHANGELOG.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 21cfa127e..395f21136 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -12,6 +12,8 @@ django-storages change log * Fix ``collectstatic`` timezone handling in and add ``get_modified_time`` to ``S3BotoStorage`` (`#290`_) * Add support for Django 1.11 (`#295`_ thanks @jdufresne) * Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_ thanks @slightfoot) +* Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in + the ``s3boto3`` backend (issue `#263`_ pr `#264`_ thanks @ldng) .. _#217: https://github.com/jschneier/django-storages/pull/217 .. _#216: https://github.com/jschneier/django-storages/issues/216 @@ -22,6 +24,8 @@ django-storages change log .. _#290: https://github.com/jschneier/django-storages/pull/290 .. _#295: https://github.com/jschneier/django-storages/pull/295 .. _#269: https://github.com/jschneier/django-storages/pull/269 +.. _#263: https://github.com/jschneier/django-storages/issues/263 +.. _#264: https://github.com/jschneier/django-storages/pull/264 1.5.2 (2017-01-13) ****************** From ee0e5753e4ef1843c385ac6a646a7fec959c2d17 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 13 Apr 2017 15:08:54 -0400 Subject: [PATCH 114/174] Highlight the boto3 backend in the README --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 660f592d1..3ccacdf47 100644 --- a/README.rst +++ b/README.rst @@ -23,9 +23,9 @@ hasn't been released yet) then the magic incantation you are looking for is:: pip install -e 'git+https://github.com/jschneier/django-storages.git#egg=django-storages' Once that is done add ``storages`` to your ``INSTALLED_APPS`` and set ``DEFAULT_FILE_STORAGE`` to the -backend of your choice. If, for example, you want to use the s3boto backend you would set:: +backend of your choice. 
If, for example, you want to use the boto3 backend you would set:: - DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' + DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' There are also a number of settings available to control how each storage backend functions, please consult the documentation for a comprehensive list. From e52a127523fdd5be50bb670ccad566c5d527f3d1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Sat, 15 Apr 2017 14:47:15 -0400 Subject: [PATCH 115/174] Properly encode Windows paths in Dropbox backend --- storages/backends/dropbox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index c76ba2718..1808633b2 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -62,7 +62,7 @@ def __init__(self, oauth2_access_token=None, root_path=None): def _full_path(self, name): if name == '/': name = '' - return safe_join(self.root_path, name) + return safe_join(self.root_path, name).replace('\\', '/') def delete(self, name): self.client.file_delete(self._full_path(name)) From bbb203ebe7171944f3dcffd320ef975342ae9c32 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Sun, 16 Apr 2017 22:21:13 +0000 Subject: [PATCH 116/174] Print deprecation notice as warning --- storages/backends/gs.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/storages/backends/gs.py b/storages/backends/gs.py index 638eeda16..ecd8ad3eb 100644 --- a/storages/backends/gs.py +++ b/storages/backends/gs.py @@ -1,12 +1,13 @@ -# DEPRECATION NOTICE: This backend is deprecated in favour of the -# "gcloud" backend. This backend uses Google Cloud Storage's XML -# Interoperable API which uses keyed-hash message authentication code -# (a.k.a. developer keys) that are linked to your Google account. The -# interoperable API is really meant for migration to Google Cloud -# Storage. The biggest problem with the developer keys is security and -# privacy. Developer keys should not be shared with anyone as they can -# be used to gain access to other Google Cloud Storage buckets linked -# to your Google account. +import warnings +warnings.warn("DEPRECATION NOTICE: This backend is deprecated in favour of the " + "\"gcloud\" backend. This backend uses Google Cloud Storage's XML " + "Interoperable API which uses keyed-hash message authentication code " + "(a.k.a. developer keys) that are linked to your Google account. The " + "interoperable API is really meant for migration to Google Cloud " + "Storage. The biggest problem with the developer keys is security and " + "privacy. Developer keys should not be shared with anyone as they can " + "be used to gain access to other Google Cloud Storage buckets linked " + "to your Google account.") from django.core.exceptions import ImproperlyConfigured from django.utils.deconstruct import deconstructible From 598753b3710c5f0ffa0bee9b3e2a0feeac4a8c71 Mon Sep 17 00:00:00 2001 From: Jody McIntyre Date: Tue, 18 Apr 2017 20:40:48 +0000 Subject: [PATCH 117/174] Simplify ACL options and improve ACL documentation --- docs/backends/gcloud.rst | 14 +++++++++----- storages/backends/gcloud.py | 6 ++---- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/docs/backends/gcloud.rst b/docs/backends/gcloud.rst index 21f8e51e7..4e126acf8 100644 --- a/docs/backends/gcloud.rst +++ b/docs/backends/gcloud.rst @@ -41,13 +41,17 @@ back to the default inferred from the environment. If True, attempt to create the bucket if it does not exist. 
-``GS_DEFAULT_ACL`` (optional)
+``GS_AUTO_CREATE_ACL`` (optional, default is ``projectPrivate``)
 
-If set to ``private`` changes uploaded file's Access Control List from the default permission ``public-read`` to give owner full control and remove read access from everyone else.
+ACL used when creating a new bucket, from the
+`list of predefined ACLs <https://cloud.google.com/storage/docs/access-control/lists#predefined-acl>`_.
+(A "JSON API" ACL is preferred but an "XML API/gsutil" ACL will be
+translated.)
 
-``GS_BUCKET_ACL`` (optional)
-
-ACL used when creating a new bucket; defaults to ``GS_DEFAULT_ACL``.
+Note that the ACL you select must still allow the service account
+running the gcloud backend to have OWNER permission on the bucket. If
+you're using the default service account, this means you're restricted
+to the ``projectPrivate`` ACL.
 
 ``GS_FILE_CHARSET`` (optional)
 
diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py
index f9aa95ad5..dfd5a39df 100644
--- a/storages/backends/gcloud.py
+++ b/storages/backends/gcloud.py
@@ -79,8 +79,7 @@ class GoogleCloudStorage(Storage):
     credentials = setting('GS_CREDENTIALS', None)
     bucket_name = setting('GS_BUCKET_NAME', None)
     auto_create_bucket = setting('GS_AUTO_CREATE_BUCKET', False)
-    default_acl = setting('GS_DEFAULT_ACL', 'public-read')
-    bucket_acl = setting('GS_BUCKET_ACL', default_acl)
+    auto_create_acl = setting('GS_AUTO_CREATE_ACL', 'projectPrivate')
     file_name_charset = setting('GS_FILE_NAME_CHARSET', 'utf-8')
     file_overwrite = setting('GS_FILE_OVERWRITE', True)
     # The max amount of memory a returned file can take up before being
@@ -121,8 +120,7 @@ def _get_or_create_bucket(self, name):
         except NotFound:
             if self.auto_create_bucket:
                 bucket = self.client.create_bucket(name)
-                bucket.acl.all().grant(self.bucket_acl)
-                bucket.acl.save()
+                bucket.acl.save_predefined(self.auto_create_acl)
                 return bucket
             raise ImproperlyConfigured("Bucket %s does not exist. Buckets "
                                        "can be automatically created by "

From 7ee116ba138702661171b324853e7ffd5f59125b Mon Sep 17 00:00:00 2001
From: Jody McIntyre 
Date: Wed, 19 Apr 2017 18:34:46 +0000
Subject: [PATCH 118/174] Address final PR comments

* Simplify `num_bytes=None` handling
* Make the `gs` warning a `DeprecationWarning`
---
 storages/backends/gcloud.py | 9 +++++----
 storages/backends/gs.py     | 2 +-
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py
index dfd5a39df..7e06886de 100644
--- a/storages/backends/gcloud.py
+++ b/storages/backends/gcloud.py
@@ -53,10 +53,11 @@ def _set_file(self, value):
     def read(self, num_bytes=None):
         if 'r' not in self._mode:
             raise AttributeError("File was not opened in read mode.")
-        if num_bytes is not None:
-            return super(GoogleCloudFile, self).read(num_bytes)
-        else:
-            return super(GoogleCloudFile, self).read()
+
+        if num_bytes is None:
+            num_bytes = -1
+
+        return super(GoogleCloudFile, self).read(num_bytes)
 
     def write(self, content):
         if 'w' not in self._mode:
diff --git a/storages/backends/gs.py b/storages/backends/gs.py
index ecd8ad3eb..10678d45b 100644
--- a/storages/backends/gs.py
+++ b/storages/backends/gs.py
@@ -7,7 +7,7 @@
         "Storage. The biggest problem with the developer keys is security and "
         "privacy. 
Developer keys should not be shared with anyone as they can " "be used to gain access to other Google Cloud Storage buckets linked " - "to your Google account.") + "to your Google account.", DeprecationWarning) from django.core.exceptions import ImproperlyConfigured from django.utils.deconstruct import deconstructible From fe0834cb18b2a47d0db47654f178cc2f878f39f7 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Sat, 15 Apr 2017 14:55:08 -0400 Subject: [PATCH 119/174] Update CHANGELOG --- CHANGELOG.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 395f21136..b5f6dbb3c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -14,6 +14,7 @@ django-storages change log * Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_ thanks @slightfoot) * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in the ``s3boto3`` backend (issue `#263`_ pr `#264`_ thanks @ldng) +* The Dropbox backend now properly translates backslashes in Windows paths into forward slashes (`e52a127`_) .. _#217: https://github.com/jschneier/django-storages/pull/217 .. _#216: https://github.com/jschneier/django-storages/issues/216 @@ -26,6 +27,7 @@ django-storages change log .. _#269: https://github.com/jschneier/django-storages/pull/269 .. _#263: https://github.com/jschneier/django-storages/issues/263 .. _#264: https://github.com/jschneier/django-storages/pull/264 +.. _e52a127: https://github.com/jschneier/django-storages/commit/e52a127523fdd5be50bb670ccad566c5d527f3d1 1.5.2 (2017-01-13) ****************** From 51c7d6036dd40dfc3892af0e2fa213f66d40d74a Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 19 Apr 2017 16:41:41 -0400 Subject: [PATCH 120/174] Update CHANGELOG for #236 --- CHANGELOG.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b5f6dbb3c..f5f294eb7 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ django-storages change log ****************** * **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_) +* **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage`` + or ``apache_libcloud.LibCloudStorage`` backends instead. (`#236`_) +* Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. (`#236`_ thanks @scjody) * Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes) * Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_ thanks @bxm156) @@ -28,6 +31,7 @@ django-storages change log .. _#263: https://github.com/jschneier/django-storages/issues/263 .. _#264: https://github.com/jschneier/django-storages/pull/264 .. _e52a127: https://github.com/jschneier/django-storages/commit/e52a127523fdd5be50bb670ccad566c5d527f3d1 +.. 
_#236: https://github.com/jschneier/django-storages/pull/236 1.5.2 (2017-01-13) ****************** From ddcdf9eaa0c70b2528b3b9c4737694f793a7cbb7 Mon Sep 17 00:00:00 2001 From: faxioman Date: Fri, 5 May 2017 21:09:54 +0200 Subject: [PATCH 121/174] Add missing gcloud test requirements (#307) --- requirements-tests.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-tests.txt b/requirements-tests.txt index 4fb5282ea..e47e30336 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,3 +5,4 @@ boto3>=1.2.3 dropbox>=3.24 mock paramiko +google-cloud>=0.25.0 \ No newline at end of file From 5cce7c998edca0878083ca2d16c8364200594dd2 Mon Sep 17 00:00:00 2001 From: Stanislav Kaledin Date: Fri, 2 Jun 2017 18:38:19 +0300 Subject: [PATCH 122/174] Fix issue #118 (#325) --- .gitignore | 1 + AUTHORS | 1 + storages/backends/sftpstorage.py | 10 ++++++---- tests/test_sftp.py | 6 ++++++ 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 63940c2de..1da610f45 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,4 @@ setuptools* __pycache__ .coverage .cache +.idea \ No newline at end of file diff --git a/AUTHORS b/AUTHORS index 86722f056..6bd343522 100644 --- a/AUTHORS +++ b/AUTHORS @@ -29,6 +29,7 @@ By order of apparition, thanks: * Matt Braymer-Hayes (S3 with Boto3) * Eirik Martiniussen Sylliaas (Google Cloud Storage native support) * Jody McIntyre (Google Cloud Storage native support) + * Stanislav Kaledin (Bug fixes in SFTPStorage) Extra thanks to Marty for adding this in Django, you can buy his very interesting book (Pro Django). diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py index 6efdf1234..0a5b6e27d 100644 --- a/storages/backends/sftpstorage.py +++ b/storages/backends/sftpstorage.py @@ -53,11 +53,12 @@ def __init__(self, host=None, params=None, interactive=None, file_mode=None, def _connect(self): self._ssh = paramiko.SSHClient() - if self._known_host_file is not None: + known_host_file = self._known_host_file or os.path.expanduser( + os.path.join("~", ".ssh", "known_hosts") + ) + + if os.path.exists(known_host_file): self._ssh.load_host_keys(self._known_host_file) - else: - # automatically add host keys from current user. - self._ssh.load_host_keys(os.path.expanduser(os.path.join("~", ".ssh", "known_hosts"))) # and automatically add new host keys for hosts we haven't seen before. self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) @@ -152,6 +153,7 @@ def delete(self, name): def exists(self, name): # Try to retrieve file info. Return true on success, false on failure. 
remote_path = self._remote_path(name) + try: self.sftp.stat(remote_path) return True diff --git a/tests/test_sftp.py b/tests/test_sftp.py index e31ef445e..eef4fb8a6 100644 --- a/tests/test_sftp.py +++ b/tests/test_sftp.py @@ -17,6 +17,12 @@ def setUp(self): def test_init(self): pass + @patch('paramiko.SSHClient') + def test_no_known_hosts_file(self, mock_ssh): + self.storage._known_host_file = "not_existed_file" + self.storage._connect() + self.assertEqual('foo', mock_ssh.return_value.connect.call_args[0][0]) + @patch('paramiko.SSHClient') def test_connect(self, mock_ssh): self.storage._connect() From c297bde94a8b617a34a175ac660e5c27111b5fd6 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 2 Jun 2017 11:43:04 -0400 Subject: [PATCH 123/174] Update CHANGELOG for #325 --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f5f294eb7..47b07c1a5 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ django-storages change log ****************** * **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_) +* **Breaking:** The ``SFTPStorage`` backend now checks for the existence of the fallback ``~/.ssh/known_hosts`` + before attempting to load it. If you had previously been passing in a path to a non-existent file it will no longer + attempt to load the fallback. * **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage`` or ``apache_libcloud.LibCloudStorage`` backends instead. (`#236`_) * Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. (`#236`_ thanks @scjody) From 810b9ba5c90c55a0ff8fd52bfe65987fc8573723 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sat, 3 Jun 2017 12:40:53 -0700 Subject: [PATCH 124/174] Add flake8 to tox and Travis CI configuration Run flake8 with tests to help test for code consistency, conventions, and code smells. 
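
The same check can be run locally the way CI runs it (a sketch; assumes
flake8 is installed and that it picks up the new `[flake8]` section in
setup.cfg):

    pip install flake8
    flake8            # lint the project using the setup.cfg configuration
    tox -e flake8     # or run it through the new tox environment
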
Fixed all existing flake8 errors: * Duplicated function names * Unused imports * Unused variables * Whitespace and styling conventions --- .travis.yml | 1 + requirements-tests.txt | 3 ++- setup.cfg | 6 ++++++ setup.py | 1 + storages/backends/gcloud.py | 2 +- storages/backends/gs.py | 22 ++++++++++++---------- storages/backends/s3boto.py | 4 +++- tests/test_ftp.py | 22 ++++++++++++++++++---- tests/test_gcloud.py | 3 +-- tests/test_sftp.py | 2 +- tests/test_utils.py | 2 +- tox.ini | 6 ++++++ 12 files changed, 53 insertions(+), 21 deletions(-) diff --git a/.travis.yml b/.travis.yml index 91aeef546..7cba94521 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,7 @@ cache: pip matrix: include: + - env: TOX_ENV=flake8 - python: 2.7 env: TOX_ENV=py27-django18 - python: 3.3 diff --git a/requirements-tests.txt b/requirements-tests.txt index e47e30336..060be6653 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,8 +1,9 @@ Django>=1.8 +flake8 pytest-cov>=2.2.1 boto>=2.32.0 boto3>=1.2.3 dropbox>=3.24 mock paramiko -google-cloud>=0.25.0 \ No newline at end of file +google-cloud>=0.25.0 diff --git a/setup.cfg b/setup.cfg index 3c6e79cf3..8431ecd5b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,8 @@ [bdist_wheel] universal=1 + +[flake8] +exclude = + .tox, + docs +max-line-length = 119 diff --git a/setup.py b/setup.py index c7a6d3373..ed759e6b6 100644 --- a/setup.py +++ b/setup.py @@ -11,6 +11,7 @@ def get_requirements_tests(): with open('requirements-tests.txt') as f: return f.readlines() + setup( name='django-storages', version=storages.__version__, diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index 7e06886de..e9c21ea07 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -4,7 +4,7 @@ from django.core.files.base import File from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible -from django.utils.encoding import force_bytes, force_text, smart_str +from django.utils.encoding import force_bytes, smart_str from django.utils import timezone from storages.utils import clean_name, safe_join, setting diff --git a/storages/backends/gs.py b/storages/backends/gs.py index 10678d45b..05142f7f0 100644 --- a/storages/backends/gs.py +++ b/storages/backends/gs.py @@ -1,13 +1,4 @@ import warnings -warnings.warn("DEPRECATION NOTICE: This backend is deprecated in favour of the " - "\"gcloud\" backend. This backend uses Google Cloud Storage's XML " - "Interoperable API which uses keyed-hash message authentication code " - "(a.k.a. developer keys) that are linked to your Google account. The " - "interoperable API is really meant for migration to Google Cloud " - "Storage. The biggest problem with the developer keys is security and " - "privacy. Developer keys should not be shared with anyone as they can " - "be used to gain access to other Google Cloud Storage buckets linked " - "to your Google account.", DeprecationWarning) from django.core.exceptions import ImproperlyConfigured from django.utils.deconstruct import deconstructible @@ -25,6 +16,17 @@ "See https://github.com/boto/boto") +warnings.warn("DEPRECATION NOTICE: This backend is deprecated in favour of the " + "\"gcloud\" backend. This backend uses Google Cloud Storage's XML " + "Interoperable API which uses keyed-hash message authentication code " + "(a.k.a. developer keys) that are linked to your Google account. The " + "interoperable API is really meant for migration to Google Cloud " + "Storage. 
The biggest problem with the developer keys is security and " + "privacy. Developer keys should not be shared with anyone as they can " + "be used to gain access to other Google Cloud Storage buckets linked " + "to your Google account.", DeprecationWarning) + + class GSBotoStorageFile(S3BotoStorageFile): def write(self, content): @@ -97,7 +99,7 @@ def _get_or_create_bucket(self, name): storage_class = 'STANDARD' try: return self.connection.get_bucket(name, - validate=self.auto_create_bucket) + validate=self.auto_create_bucket) except self.connection_response_error: if self.auto_create_bucket: bucket = self.connection.create_bucket(name, storage_class=storage_class) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 8af6e5155..f7d642648 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -110,7 +110,9 @@ def write(self, content, *args, **kwargs): upload_headers = { provider.acl_header: self._storage.default_acl } - upload_headers.update({'Content-Type': mimetypes.guess_type(self.key.name)[0] or self._storage.key_class.DefaultContentType}) + upload_headers.update({ + 'Content-Type': mimetypes.guess_type(self.key.name)[0] or self._storage.key_class.DefaultContentType + }) upload_headers.update(self._storage.headers) self._multipart = self._storage.bucket.initiate_multipart_upload( self.key.name, diff --git a/tests/test_ftp.py b/tests/test_ftp.py index 3b539e703..60164bcb7 100644 --- a/tests/test_ftp.py +++ b/tests/test_ftp.py @@ -44,11 +44,25 @@ def test_init_location_from_setting(self, mock_setting): def test_decode_location(self): config = self.storage._decode_location(URL) - wanted_config = {'passwd': 'b@r', 'host': 'localhost', 'user': 'foo', 'active': False, 'path': '/', 'port': 2121} + wanted_config = { + 'passwd': 'b@r', + 'host': 'localhost', + 'user': 'foo', + 'active': False, + 'path': '/', + 'port': 2121, + } self.assertEqual(config, wanted_config) # Test active FTP config = self.storage._decode_location('a'+URL) - wanted_config = {'passwd': 'b@r', 'host': 'localhost', 'user': 'foo', 'active': True, 'path': '/', 'port': 2121} + wanted_config = { + 'passwd': 'b@r', + 'host': 'localhost', + 'user': 'foo', + 'active': True, + 'path': '/', + 'port': 2121, + } self.assertEqual(config, wanted_config) def test_decode_location_error(self): @@ -84,7 +98,7 @@ def test_disconnect(self, mock_ftp_quit): self.storage.disconnect() self.assertIsNone(self.storage._connection) - @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo',}) + @patch('ftplib.FTP', **{'return_value.pwd.return_value': 'foo'}) def test_mkremdirs(self, mock_ftp): self.storage._start_connection() self.storage._mkremdirs('foo/bar') @@ -116,7 +130,7 @@ def test_read(self, mock_ftp): self.storage._read('foo') @patch('ftplib.FTP', **{'return_value.pwd.side_effect': IOError()}) - def test_read(self, mock_ftp): + def test_read2(self, mock_ftp): self.storage._start_connection() with self.assertRaises(ftp.FTPStorageException): self.storage._read('foo') diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index 2717355a7..a5db15d8e 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -7,7 +7,6 @@ import datetime -from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile from django.utils import timezone from django.test import TestCase @@ -104,7 +103,7 @@ def test_save(self): self.storage._bucket.get_blob().upload_from_file.assert_called_with( content, size=len(data)) - def test_save(self): + def test_save2(self): 
data = 'This is some test ủⓝï℅ⅆℇ content.' filename = 'ủⓝï℅ⅆℇ.txt' content = ContentFile(data) diff --git a/tests/test_sftp.py b/tests/test_sftp.py index eef4fb8a6..29acd4069 100644 --- a/tests/test_sftp.py +++ b/tests/test_sftp.py @@ -34,7 +34,7 @@ def test_open(self): @patch('storages.backends.sftpstorage.SFTPStorage.sftp') def test_read(self, mock_sftp): - file_ = self.storage._read('foo') + self.storage._read('foo') self.assertTrue(mock_sftp.open.called) @patch('storages.backends.sftpstorage.SFTPStorage.sftp') diff --git a/tests/test_utils.py b/tests/test_utils.py index 2a892e283..5f0a118c2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -53,7 +53,7 @@ def test_normal(self): def test_with_dot(self): path = utils.safe_join("", "path/./somewhere/../other", "..", - ".", "to/./somewhere") + ".", "to/./somewhere") self.assertEqual(path, "path/to/somewhere") def test_base_url(self): diff --git a/tox.ini b/tox.ini index 24fa21375..ba37097f0 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,6 @@ [tox] envlist = + flake8 {py27,py33,py34,py35}-django18, {py27,py34,py35}-django19 {py27,py34,py35}-django110 @@ -23,3 +24,8 @@ deps = dropbox>=3.24 paramiko google-cloud-storage>=0.22.0 + + +[testenv:flake8] +deps = flake8 +commands = flake8 From f2fb535f990d0580195b5592b30a3f2ddc48e4db Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Mon, 5 Jun 2017 06:53:23 -0700 Subject: [PATCH 125/174] Alphabetize requirements file --- requirements-tests.txt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 060be6653..cf7218eca 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,9 +1,9 @@ -Django>=1.8 -flake8 -pytest-cov>=2.2.1 -boto>=2.32.0 boto3>=1.2.3 +boto>=2.32.0 +Django>=1.8 dropbox>=3.24 +flake8 +google-cloud>=0.25.0 mock paramiko -google-cloud>=0.25.0 +pytest-cov>=2.2.1 From 895a068fcf3f23a6b71294c5b4b67a822df6e642 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Fri, 19 May 2017 14:48:11 -0700 Subject: [PATCH 126/174] Fix S3 safe_join() to allow colons Combine the identical s3boto3 and s3boto implementations of safe_join() and its tests to reduce code duplication. Fixes #248 --- storages/backends/s3boto3.py | 35 +----------------------------- storages/utils.py | 14 ++++++------ tests/test_s3boto3.py | 42 ------------------------------------ tests/test_utils.py | 15 ++++++++++--- 4 files changed, 19 insertions(+), 87 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index c0bbf7419..62c090d9e 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -22,7 +22,7 @@ raise ImproperlyConfigured("Could not load Boto3's S3 bindings.\n" "See https://github.com/boto/boto3") -from storages.utils import setting +from storages.utils import setting, safe_join boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) @@ -31,39 +31,6 @@ "higher.\nSee https://github.com/boto/boto3") -def safe_join(base, *paths): - """ - A version of django.utils._os.safe_join for S3 paths. - - Joins one or more path components to the base path component - intelligently. Returns a normalized version of the final path. - - The final path must be located inside of the base path component - (otherwise a ValueError is raised). - - Paths outside the base path indicate a possible security - sensitive operation. 
- """ - base_path = force_text(base) - base_path = base_path.rstrip('/') - paths = [force_text(p) for p in paths] - - final_path = base_path - for path in paths: - final_path = urlparse.urljoin(final_path.rstrip('/') + "/", path) - - # Ensure final_path starts with base_path and that the next character after - # the final path is '/' (or nothing, in which case final_path must be - # equal to base_path). - base_path_len = len(base_path) - if (not final_path.startswith(base_path) or - final_path[base_path_len:base_path_len + 1] not in ('', '/')): - raise ValueError('the joined path is located outside of the base path' - ' component') - - return final_path.lstrip('/') - - @deconstructible class S3Boto3StorageFile(File): diff --git a/storages/utils.py b/storages/utils.py index 60967d869..3637f6590 100644 --- a/storages/utils.py +++ b/storages/utils.py @@ -3,7 +3,6 @@ from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import force_text -from django.utils.six.moves.urllib import parse as urlparse def setting(name, default=None, strict=False): @@ -63,16 +62,15 @@ def safe_join(base, *paths): base_path = base_path.rstrip('/') paths = [force_text(p) for p in paths] - final_path = base_path - for path in paths: - final_path = urlparse.urljoin(final_path.rstrip('/') + '/', path) + final_path = posixpath.normpath(posixpath.join(base_path + '/', *paths)) + # posixpath.normpath() strips the trailing /. Add it back. + if paths[-1].endswith('/'): + final_path += '/' # Ensure final_path starts with base_path and that the next character after - # the final path is '/' (or nothing, in which case final_path must be - # equal to base_path). + # the final path is /. base_path_len = len(base_path) - if (not final_path.startswith(base_path) or - final_path[base_path_len:base_path_len + 1] not in ('', '/')): + if (not final_path.startswith(base_path) or final_path[base_path_len] != '/'): raise ValueError('the joined path is located outside of the base path' ' component') diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 9bb81d4b0..52dfda93c 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -18,11 +18,6 @@ from storages.backends import s3boto3 -__all__ = ( - 'SafeJoinTest', - 'S3Boto3StorageTests', -) - class S3Boto3TestCase(TestCase): def setUp(self): @@ -30,43 +25,6 @@ def setUp(self): self.storage._connection = mock.MagicMock() -class SafeJoinTest(TestCase): - def test_normal(self): - path = s3boto3.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") - self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") - - def test_with_dot(self): - path = s3boto3.safe_join("", "path/./somewhere/../other", "..", - ".", "to/./somewhere") - self.assertEqual(path, "path/to/somewhere") - - def test_base_url(self): - path = s3boto3.safe_join("base_url", "path/to/somewhere") - self.assertEqual(path, "base_url/path/to/somewhere") - - def test_base_url_with_slash(self): - path = s3boto3.safe_join("base_url/", "path/to/somewhere") - self.assertEqual(path, "base_url/path/to/somewhere") - - def test_suspicious_operation(self): - self.assertRaises(ValueError, - s3boto3.safe_join, "base", "../../../../../../../etc/passwd") - - def test_trailing_slash(self): - """ - Test safe_join with paths that end with a trailing slash. 
- """ - path = s3boto3.safe_join("base_url/", "path/to/somewhere/") - self.assertEqual(path, "base_url/path/to/somewhere/") - - def test_trailing_slash_multi(self): - """ - Test safe_join with multiple paths that end with a trailing slash. - """ - path = s3boto3.safe_join("base_url/", "path/to/" "somewhere/") - self.assertEqual(path, "base_url/path/to/somewhere/") - - class S3Boto3StorageTests(S3Boto3TestCase): def test_clean_name(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index 5f0a118c2..af14a4cb5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,3 +1,5 @@ +import datetime + from django.test import TestCase from django.conf import settings from django.core.exceptions import ImproperlyConfigured @@ -65,8 +67,10 @@ def test_base_url_with_slash(self): self.assertEqual(path, "base_url/path/to/somewhere") def test_suspicious_operation(self): - self.assertRaises(ValueError, - utils.safe_join, "base", "../../../../../../../etc/passwd") + with self.assertRaises(ValueError): + utils.safe_join("base", "../../../../../../../etc/passwd") + with self.assertRaises(ValueError): + utils.safe_join("base", "/etc/passwd") def test_trailing_slash(self): """ @@ -79,5 +83,10 @@ def test_trailing_slash_multi(self): """ Test safe_join with multiple paths that end with a trailing slash. """ - path = utils.safe_join("base_url/", "path/to/" "somewhere/") + path = utils.safe_join("base_url/", "path/to/", "somewhere/") self.assertEqual(path, "base_url/path/to/somewhere/") + + def test_datetime_isoformat(self): + dt = datetime.datetime(2017, 5, 19, 14, 45, 37, 123456) + path = utils.safe_join('base_url', dt.isoformat()) + self.assertEqual(path, 'base_url/2017-05-19T14:45:37.123456') From 1755392f9e3d9ec3ed017a308bcf0baed831a6f4 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Mon, 5 Jun 2017 07:15:07 -0700 Subject: [PATCH 127/174] Remove unnecessary __all__ from test files --- tests/test_dropbox.py | 5 ----- tests/test_s3boto.py | 4 ---- 2 files changed, 9 deletions(-) diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py index a29d10468..8b6b093e6 100644 --- a/tests/test_dropbox.py +++ b/tests/test_dropbox.py @@ -55,11 +55,6 @@ 'expires': 'Fri, 16 Sep 2011 01:01:25 +0000', } -__all__ = [ - 'DropBoxTest', - 'DropBoxFileTest' -] - class DropBoxTest(TestCase): @mock.patch('dropbox.client._OAUTH2_ACCESS_TOKEN_PATTERN', diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index 685c2d3aa..07a3f40f3 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -16,10 +16,6 @@ from storages.backends import s3boto -__all__ = ( - 'S3BotoStorageTests', -) - class S3BotoTestCase(TestCase): @mock.patch('storages.backends.s3boto.S3Connection') From 153aa40456dbd7c227ea6ab12e4311bfd07eae76 Mon Sep 17 00:00:00 2001 From: Stanislav Kaledin Date: Wed, 7 Jun 2017 18:12:36 +0300 Subject: [PATCH 128/174] Fix minor bug from PR #325 (#330) * Fix issue #118 * Fix error when self._known_host_file is None and None is passed to load_host_keys * Update test_sftp.py --- storages/backends/sftpstorage.py | 3 ++- tests/test_sftp.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py index 0a5b6e27d..cb13507cc 100644 --- a/storages/backends/sftpstorage.py +++ b/storages/backends/sftpstorage.py @@ -58,7 +58,8 @@ def _connect(self): ) if os.path.exists(known_host_file): - self._ssh.load_host_keys(self._known_host_file) + self._ssh.load_host_keys(known_host_file) + # and automatically add new host keys for hosts 
we haven't seen before. self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) diff --git a/tests/test_sftp.py b/tests/test_sftp.py index 29acd4069..39dd78800 100644 --- a/tests/test_sftp.py +++ b/tests/test_sftp.py @@ -1,9 +1,12 @@ +import os import stat from datetime import datetime + try: from unittest.mock import patch, MagicMock except ImportError: # Python 3.2 and below from mock import patch, MagicMock + from django.test import TestCase from django.core.files.base import File from django.utils.six import BytesIO @@ -23,6 +26,13 @@ def test_no_known_hosts_file(self, mock_ssh): self.storage._connect() self.assertEqual('foo', mock_ssh.return_value.connect.call_args[0][0]) + @patch.object(os.path, "expanduser", return_value="/path/to/known_hosts") + @patch.object(os.path, "exists", return_value=True) + @patch('paramiko.SSHClient') + def test_error_when_known_hosts_file_not_defined(self, mock_ssh, *a): + self.storage._connect() + self.storage._ssh.load_host_keys.assert_called_once_with("/path/to/known_hosts") + @patch('paramiko.SSHClient') def test_connect(self, mock_ssh): self.storage._connect() From 259beac2861b56172f54f21983f96b85a3298ba1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 7 Jun 2017 12:26:12 -0400 Subject: [PATCH 129/174] Make linter happy, update CHANGELOG for #322 and #325 --- CHANGELOG.rst | 7 ++++++- storages/backends/sftpstorage.py | 1 - 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 47b07c1a5..7e85854a1 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -7,7 +7,7 @@ django-storages change log * **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_) * **Breaking:** The ``SFTPStorage`` backend now checks for the existence of the fallback ``~/.ssh/known_hosts`` before attempting to load it. If you had previously been passing in a path to a non-existent file it will no longer - attempt to load the fallback. + attempt to load the fallback. (`issue #118`_ `pr #325`_) * **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage`` or ``apache_libcloud.LibCloudStorage`` backends instead. (`#236`_) * Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. (`#236`_ thanks @scjody) @@ -21,6 +21,7 @@ django-storages change log * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in the ``s3boto3`` backend (issue `#263`_ pr `#264`_ thanks @ldng) * The Dropbox backend now properly translates backslashes in Windows paths into forward slashes (`e52a127`_) +* The S3 backends now permit colons in the keys (`issue #248`_ `pr #322`_) .. _#217: https://github.com/jschneier/django-storages/pull/217 .. _#216: https://github.com/jschneier/django-storages/issues/216 @@ -35,6 +36,10 @@ django-storages change log .. _#264: https://github.com/jschneier/django-storages/pull/264 .. _e52a127: https://github.com/jschneier/django-storages/commit/e52a127523fdd5be50bb670ccad566c5d527f3d1 .. _#236: https://github.com/jschneier/django-storages/pull/236 +.. _issue #118: https://github.com/jschneier/django-storages/issues/118 +.. _pr #325: https://github.com/jschneier/django-storages/pull/325 +.. _issue #248: https://github.com/jschneier/django-storages/issues/248 +.. 
_pr #322: https://github.com/jschneier/django-storages/pull/322
 
 1.5.2 (2017-01-13)
 ******************
diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py
index cb13507cc..340ed98e2 100644
--- a/storages/backends/sftpstorage.py
+++ b/storages/backends/sftpstorage.py
@@ -60,7 +60,6 @@ def _connect(self):
 
         if os.path.exists(known_host_file):
             self._ssh.load_host_keys(known_host_file)
-            # and automatically add new host keys for hosts we haven't seen before.
 
         self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

From f552af8f4d2e302ab06fdcd19b0b9c7c0e4c18ed Mon Sep 17 00:00:00 2001
From: Lucas Rangel Cezimbra
Date: Fri, 2 Jun 2017 02:32:32 -0300
Subject: [PATCH 130/174] Update dropbox.rst

---
 docs/backends/dropbox.rst | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/docs/backends/dropbox.rst b/docs/backends/dropbox.rst
index 5fa925e6c..6b7aa1fbc 100644
--- a/docs/backends/dropbox.rst
+++ b/docs/backends/dropbox.rst
@@ -1,9 +1,22 @@
 DropBox
 =======
 
+A custom storage system for Django using the Dropbox Storage backend.
+
+Before you start configuration, you will need to install `Dropbox SDK for Python`_.
+
+
+Install the package::
+
+    pip install dropbox
+
 Settings
 --------
 
+To use DropBoxStorage set::
+
+    DEFAULT_FILE_STORAGE = 'storages.backends.dropbox.DropBoxStorage'
+
 ``DROPBOX_OAUTH2_TOKEN``
   Your DropBox token, if you haven't follow this `guide step`_.
 ``DROPBOX_ROOT_PATH``
   Allow to jail your storage to a defined directory.
 
 .. _`guide step`: https://www.dropbox.com/developers/documentation/python#tutorial
+.. _`Dropbox SDK for Python`: https://www.dropbox.com/developers/documentation/python#tutorial

From d8e4afc8b4e59e793173732da4aa32c2b1ecd548 Mon Sep 17 00:00:00 2001
From: Fabien Schwob
Date: Thu, 8 Jun 2017 14:09:09 +0200
Subject: [PATCH 131/174] Improved S3 docs about Signature version

---
 docs/backends/amazon-S3.rst | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst
index 1dbd04649..32d19db83 100644
--- a/docs/backends/amazon-S3.rst
+++ b/docs/backends/amazon-S3.rst
@@ -98,8 +98,13 @@ Available are numerous settings. It should be especially noted the following:
 ``AWS_S3_CALLING_FORMAT`` (optional: default is ``SubdomainCallingFormat()``)
     Defines the S3 calling format to use to connect to the static bucket.
 
-.. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+``AWS_S3_SIGNATURE_VERSION`` (optional: no default value)
+    Defines the S3 signature version to use. For example for
+    `AWS Signature Version 4`_ you should use ``'s3v4'``. You can see
+    `which S3 regions are compatible with each signature version`_.
 
+.. _AWS Signature Version 4: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+.. _which S3 regions are compatible with each signature version: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
 
 CloudFront
 ~~~~~~~~~~

From b70e42c2d0e1af5685ba79b36d1fbde21307a38c Mon Sep 17 00:00:00 2001
From: Luke Burden
Date: Fri, 16 Jun 2017 10:58:34 -0700
Subject: [PATCH 132/174] Avoid incorrect behaviour of S3BotoStorage.exists

Calling `S3BotoStorage._save` writes to `_entries` if `preload_metadata`
is True. Future access to `_entries` is limited because the property
`entries` only loads metadata for the bucket if `_entries` is empty,
which, after a call to `_save`, it may not be. This in turn causes
`exists` to incorrectly return False for existing files.
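
A minimal sketch of the failure mode (hypothetical file names; assumes
`preload_metadata` is enabled and AWS credentials and a bucket are
configured in settings):

    from django.core.files.base import ContentFile
    from storages.backends.s3boto import S3BotoStorage

    storage = S3BotoStorage(preload_metadata=True)
    storage.save('local.txt', ContentFile(b'data'))
    # _save() has now cached 'local.txt' in _entries, so the `entries`
    # property sees a non-empty dict and never lists the bucket again.
    storage.exists('uploaded-elsewhere.txt')  # False, even if the key exists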
--- storages/backends/s3boto.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index f7d642648..99520cbe3 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -233,6 +233,7 @@ def __init__(self, acl=None, bucket=None, **settings): self._entries = {} self._bucket = None self._connection = None + self._loaded_meta = False self.security_token = None if not self.access_key and not self.secret_key: @@ -270,9 +271,14 @@ def entries(self): """ Get the locally cached files for the bucket. """ - if self.preload_metadata and not self._entries: - self._entries = dict((self._decode_name(entry.key), entry) - for entry in self.bucket.list(prefix=self.location)) + if self.preload_metadata and not self._loaded_meta: + self._entries.update( + dict( + (self._decode_name(entry.key), entry) + for entry in self.bucket.list(prefix=self.location) + ) + ) + self._loaded_meta = True return self._entries def _lookup_env(self, names): From e46f3067fbc0ba9f8d1c351026a59aa9e0a390eb Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Tue, 20 Jun 2017 19:18:30 -0400 Subject: [PATCH 133/174] Dropbox api upgrade (#273) * moved to dropbox v2 api * replace client.metadata with dropbox v2 call * replace new dropbox v2 error * replace dropbox v1 methods with v2 equivalents * Replace final deprecated dropbox methods, fix the tests, upgrade the required version --- requirements-tests.txt | 2 +- storages/backends/dropbox.py | 28 ++++++++++++++-------------- tests/test_dropbox.py | 34 ++++++++++++++-------------------- 3 files changed, 29 insertions(+), 35 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index cf7218eca..2637f7d13 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,7 +1,7 @@ boto3>=1.2.3 boto>=2.32.0 +dropbox>=7.21 Django>=1.8 -dropbox>=3.24 flake8 google-cloud>=0.25.0 mock diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index 1808633b2..a1fbfe2bc 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -22,8 +22,8 @@ from storages.utils import setting -from dropbox.client import DropboxClient -from dropbox.rest import ErrorResponse +from dropbox import Dropbox +from dropbox.exceptions import ApiError DATE_FORMAT = '%a, %d %b %Y %X +0000' @@ -40,7 +40,7 @@ def __init__(self, name, storage): @property def file(self): if not hasattr(self, '_file'): - response = self._storage.client.get_file(self.name) + response = self._storage.client.files_download(self.name) self._file = SpooledTemporaryFile() copyfileobj(response, self._file) self._file.seek(0) @@ -57,7 +57,7 @@ def __init__(self, oauth2_access_token=None, root_path=None): if oauth2_access_token is None: raise ImproperlyConfigured("You must configure a token auth at" "'settings.DROPBOX_OAUTH2_TOKEN'.") - self.client = DropboxClient(oauth2_access_token) + self.client = Dropbox(oauth2_access_token) def _full_path(self, name): if name == '/': @@ -65,18 +65,18 @@ def _full_path(self, name): return safe_join(self.root_path, name).replace('\\', '/') def delete(self, name): - self.client.file_delete(self._full_path(name)) + self.client.files_delete(self._full_path(name)) def exists(self, name): try: - return bool(self.client.metadata(self._full_path(name))) - except ErrorResponse: + return bool(self.client.files_get_metadata(self._full_path(name))) + except ApiError: return False def listdir(self, path): directories, files = [], [] full_path = self._full_path(path) - 
metadata = self.client.metadata(full_path) + metadata = self.client.files_get_metadata(full_path) for entry in metadata['contents']: entry['path'] = entry['path'].replace(full_path, '', 1) entry['path'] = entry['path'].replace('/', '', 1) @@ -87,27 +87,27 @@ def listdir(self, path): return directories, files def size(self, name): - metadata = self.client.metadata(self._full_path(name)) + metadata = self.client.files_get_metadata(self._full_path(name)) return metadata['bytes'] def modified_time(self, name): - metadata = self.client.metadata(self._full_path(name)) + metadata = self.client.files_get_metadata(self._full_path(name)) mod_time = datetime.strptime(metadata['modified'], DATE_FORMAT) return mod_time def accessed_time(self, name): - metadata = self.client.metadata(self._full_path(name)) + metadata = self.client.files_get_metadata(self._full_path(name)) acc_time = datetime.strptime(metadata['client_mtime'], DATE_FORMAT) return acc_time def url(self, name): - media = self.client.media(self._full_path(name)) - return media['url'] + media = self.client.files_get_temporary_link(self._full_path(name)) + return media['link'] def _open(self, name, mode='rb'): remote_file = DropBoxFile(self._full_path(name), self) return remote_file def _save(self, name, content): - self.client.put_file(self._full_path(name), content) + self.client.files_upload(content, self._full_path(name)) return name diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py index 8b6b093e6..e212b0dce 100644 --- a/tests/test_dropbox.py +++ b/tests/test_dropbox.py @@ -8,7 +8,7 @@ from django.test import TestCase from django.core.files.base import File, ContentFile from django.core.exceptions import ImproperlyConfigured, \ - SuspiciousFileOperation + SuspiciousFileOperation from storages.backends import dropbox @@ -51,7 +51,7 @@ 'thumb_exists': False } FILE_MEDIA_FIXTURE = { - 'url': 'https://dl.dropboxusercontent.com/1/view/foo', + 'link': 'https://dl.dropboxusercontent.com/1/view/foo', 'expires': 'Fri, 16 Sep 2011 01:01:25 +0000', } @@ -67,24 +67,24 @@ def test_no_access_token(self, *args): with self.assertRaises(ImproperlyConfigured): dropbox.DropBoxStorage(None) - @mock.patch('dropbox.client.DropboxClient.file_delete', + @mock.patch('dropbox.Dropbox.files_delete', return_value=FILE_FIXTURE) def test_delete(self, *args): self.storage.delete('foo') - @mock.patch('dropbox.client.DropboxClient.metadata', + @mock.patch('dropbox.Dropbox.files_get_metadata', return_value=[FILE_FIXTURE]) def test_exists(self, *args): exists = self.storage.exists('foo') self.assertTrue(exists) - @mock.patch('dropbox.client.DropboxClient.metadata', + @mock.patch('dropbox.Dropbox.files_get_metadata', return_value=[]) def test_not_exists(self, *args): exists = self.storage.exists('bar') self.assertFalse(exists) - @mock.patch('dropbox.client.DropboxClient.metadata', + @mock.patch('dropbox.Dropbox.files_get_metadata', return_value=FILES_FIXTURE) def test_listdir(self, *args): dirs, files = self.storage.listdir('/') @@ -93,19 +93,19 @@ def test_listdir(self, *args): self.assertEqual(dirs[0], 'bar') self.assertEqual(files[0], 'foo.txt') - @mock.patch('dropbox.client.DropboxClient.metadata', + @mock.patch('dropbox.Dropbox.files_get_metadata', return_value=FILE_FIXTURE) def test_size(self, *args): size = self.storage.size('foo') self.assertEqual(size, FILE_FIXTURE['bytes']) - @mock.patch('dropbox.client.DropboxClient.metadata', + @mock.patch('dropbox.Dropbox.files_get_metadata', return_value=FILE_FIXTURE) def test_modified_time(self, *args): mtime = 
self.storage.modified_time('foo')
         self.assertEqual(mtime, FILE_DATE)
 
-    @mock.patch('dropbox.client.DropboxClient.metadata',
+    @mock.patch('dropbox.Dropbox.files_get_metadata',
                 return_value=FILE_FIXTURE)
     def test_accessed_time(self, *args):
         mtime = self.storage.accessed_time('foo')
@@ -115,16 +115,16 @@ def test_open(self, *args):
         obj = self.storage._open('foo')
         self.assertIsInstance(obj, File)
 
-    @mock.patch('dropbox.client.DropboxClient.put_file',
+    @mock.patch('dropbox.Dropbox.files_upload',
                 return_value='foo')
     def test_save(self, *args):
         self.storage._save('foo', b'bar')
 
-    @mock.patch('dropbox.client.DropboxClient.media',
+    @mock.patch('dropbox.Dropbox.files_get_temporary_link',
                 return_value=FILE_MEDIA_FIXTURE)
     def test_url(self, *args):
         url = self.storage.url('foo')
-        self.assertEqual(url, FILE_MEDIA_FIXTURE['url'])
+        self.assertEqual(url, FILE_MEDIA_FIXTURE['link'])
 
     def test_formats(self, *args):
         self.storage = dropbox.DropBoxStorage('foo')
@@ -136,24 +136,18 @@ def test_formats(self, *args):
 
 
 class DropBoxFileTest(TestCase):
-    @mock.patch('dropbox.client._OAUTH2_ACCESS_TOKEN_PATTERN',
-                re.compile(r'.*'))
-    @mock.patch('dropbox.client.DropboxOAuth2Session')
     def setUp(self, *args):
        self.storage = dropbox.DropBoxStorage('foo')
        self.file = dropbox.DropBoxFile('/foo.txt', self.storage)
 
-    @mock.patch('dropbox.client.DropboxClient.get_file',
+    @mock.patch('dropbox.Dropbox.files_download',
                 return_value=ContentFile(b'bar'))
     def test_read(self, *args):
         file = self.storage._open(b'foo')
         self.assertEqual(file.read(), b'bar')
 
 
-@mock.patch('dropbox.client._OAUTH2_ACCESS_TOKEN_PATTERN',
-            re.compile(r'.*'))
-@mock.patch('dropbox.client.DropboxOAuth2Session')
-@mock.patch('dropbox.client.DropboxClient.metadata',
+@mock.patch('dropbox.Dropbox.files_get_metadata',
             return_value={'contents': []})
 class DropBoxRootPathTest(TestCase):
     def test_jailed(self, *args):

From 50dec4a83b8ada3a0222a38e9d2efedaac8d9705 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Tue, 20 Jun 2017 19:22:28 -0400
Subject: [PATCH 134/174] Update CHANGELOG for #273, remove thanks noise

---
 CHANGELOG.rst | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 7e85854a1..bb307824c 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -5,25 +5,28 @@ django-storages change log
 ******************
 
 * **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_)
+* **Breaking:** ``DropBoxStorage`` has been upgraded to support v2 of the API, v1 will be shut off at the
+  end of the month - upgrading is recommended (`#273`_)
 * **Breaking:** The ``SFTPStorage`` backend now checks for the existence of the fallback ``~/.ssh/known_hosts``
   before attempting to load it. If you had previously been passing in a path to a non-existent file it will no longer
   attempt to load the fallback. (`issue #118`_ `pr #325`_)
 * **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage``
   or ``apache_libcloud.LibCloudStorage`` backends instead. (`#236`_)
-* Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. (`#236`_ thanks @scjody)
-* Pass in the location constraint when auto creating a bucket (`#257`_, `#258`_ thanks @mattayes)
+* Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. 
(`#236`_) +* Pass in the location constraint when auto creating a bucket in ``S3Boto3Storage`` (`#257`_, `#258`_) * Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment - to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_ thanks @bxm156) -* Fix Boto3 non-ascii filenames on Python 2.7 (`#216`_, `#217`_ thanks @AGASS007) + to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_) +* Fix Boto3 non-ascii filenames on Python 2.7 (`#216`_, `#217`_) * Fix ``collectstatic`` timezone handling in and add ``get_modified_time`` to ``S3BotoStorage`` (`#290`_) -* Add support for Django 1.11 (`#295`_ thanks @jdufresne) -* Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_ thanks @slightfoot) +* Add support for Django 1.11 (`#295`_) +* Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_) * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in - the ``s3boto3`` backend (issue `#263`_ pr `#264`_ thanks @ldng) + the ``s3boto3`` backend (issue `#263`_ pr `#264`_) * The Dropbox backend now properly translates backslashes in Windows paths into forward slashes (`e52a127`_) * The S3 backends now permit colons in the keys (`issue #248`_ `pr #322`_) .. _#217: https://github.com/jschneier/django-storages/pull/217 +.. _#273: https://github.com/jschneier/django-storages/pull/273 .. _#216: https://github.com/jschneier/django-storages/issues/216 .. _#283: https://github.com/jschneier/django-storages/pull/283 .. _#280: https://github.com/jschneier/django-storages/pull/280 From 75a9bf6bd92dddbb1aef87572b1b4d501de9d726 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 21 Jun 2017 11:15:53 -0400 Subject: [PATCH 135/174] Default signature version (#335) * Default AWS_S3_SIGNATURE_VERSION to v4 * Add documentation and CHANGELOG for new default * Add documentation on always using Sig v4 --- CHANGELOG.rst | 3 +++ docs/backends/amazon-S3.rst | 26 +++++++++++++++++++------- storages/backends/s3boto3.py | 2 +- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index bb307824c..4ef1d2d78 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -10,6 +10,8 @@ django-storages change log * **Breaking:** The ``SFTPStorage`` backend now checks for the existence of the fallback ``~/.ssh/known_hosts`` before attempting to load it. If you had previously been passing in a path to a non-existent file it will no longer attempt to load the fallback. (`issue #118`_ `pr #325`_) +* **Breaking:** The default version value for ``AWS_S3_SIGNATURE_VERSION`` is now ``'s3v4'``. No changes should + be required (`#335`_) * **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage`` or ``apache_libcloud.LibCloudStorage`` backends instead. (`#236`_) * Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. (`#236`_) @@ -43,6 +45,7 @@ django-storages change log .. _pr #325: https://github.com/jschneier/django-storages/pull/325 .. _issue #248: https://github.com/jschneier/django-storages/issues/248 .. _pr #322: https://github.com/jschneier/django-storages/pull/322 +.. _#335: https://github.com/jschneier/django-storages/pull/335 1.5.2 (2017-01-13) ****************** diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 32d19db83..7689b6ce6 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -14,6 +14,10 @@ for the forseeable future. 
For historical completeness an extreme legacy backend was removed in version 1.2 +If using the boto backend it is recommended that you configure it to also use +`AWS Signature Version 4`_. This can be done by adding ``S3_USE_SIGV4 = True`` to +your settings and setting the ``AWS_S3_HOST`` configuration option. + Settings -------- @@ -68,8 +72,7 @@ Available are numerous settings. It should be especially noted the following: public. ``AWS_QUERYSTRING_EXPIRE`` (optional; default is 3600 seconds) - The number of seconds that a generated URL with `query parameter - authentication`_ is valid for. + The number of seconds that a generated URL is valid for. ``AWS_S3_ENCRYPTION`` (optional; default is ``False``) Enable server-side file encryption while at rest, by setting ``encrypt_key`` parameter to True. More info available here: http://boto.cloudhackers.com/en/latest/ref/s3.html @@ -77,6 +80,12 @@ Available are numerous settings. It should be especially noted the following: ``AWS_S3_FILE_OVERWRITE`` (optional: default is ``True``) By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. +``AWS_S3_HOST`` (optional - boto only, default is ``s3.amazonaws.com``) + + To ensure you use `AWS Signature Version 4`_ it is recommended to set this to the host of your bucket. See the + `mapping of region to endpoint names`_ to figure out the appropriate endpoint for your bucket. Also be sure to + add ``S3_USE_SIGV4 = True`` to settings.py + ``AWS_LOCATION`` (optional: default is `''`) A path prefix that will be prepended to all uploads @@ -98,13 +107,15 @@ Available are numerous settings. It should be especially noted the following: ``AWS_S3_CALLING_FORMAT`` (optional: default is ``SubdomainCallingFormat()``) Defines the S3 calling format to use to connect to the static bucket. -``AWS_S3_SIGNATURE_VERSION`` (optional: no default value) - Defines the S3 signature version to use. For example for - `AWS Signature Version 4`_ you should use ``'s3v4'``. You can see - `which S3 regions are compatible with each signature version`_. +``AWS_S3_SIGNATURE_VERSION`` (optional - boto3 only: default is ``s3v4``) + All AWS regions support the v4 version of the signing protocol. To use the legacy v2 set this to ``'s3'``. Some non-Amazon S3 + implementations might require this change. + +.. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html .. _AWS Signature Version 4: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html -.. _which S3 regions are compatible with each signature version: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region +.. _mapping of region to endpoint names: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region + CloudFront ~~~~~~~~~~ @@ -112,6 +123,7 @@ If you're using S3 as a CDN (via CloudFront), you'll probably want this storage to serve those files using that:: AWS_S3_CUSTOM_DOMAIN = 'cdn.mydomain.com' + **NOTE:** Django's `STATIC_URL` `must end in a slash`_ and the `AWS_S3_CUSTOM_DOMAIN` *must not*. It is best to set this variable indepedently of `STATIC_URL`. .. 
_must end in a slash: https://docs.djangoproject.com/en/dev/ref/settings/#static-url diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 62c090d9e..1a1badb1f 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -188,7 +188,7 @@ class S3Boto3Storage(Storage): bucket_acl = setting('AWS_BUCKET_ACL', default_acl) querystring_auth = setting('AWS_QUERYSTRING_AUTH', True) querystring_expire = setting('AWS_QUERYSTRING_EXPIRE', 3600) - signature_version = setting('AWS_S3_SIGNATURE_VERSION') + signature_version = setting('AWS_S3_SIGNATURE_VERSION', 's3v4') reduced_redundancy = setting('AWS_REDUCED_REDUNDANCY', False) location = setting('AWS_LOCATION', '') encryption = setting('AWS_S3_ENCRYPTION', False) From 5ab340d9de30305833386758ba33d5c22bd387d8 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 21 Jun 2017 11:23:31 -0400 Subject: [PATCH 136/174] Release version 1.6 --- CHANGELOG.rst | 2 +- storages/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4ef1d2d78..dc67d033a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ django-storages change log ========================== -1.6.0 (XXXX-XX-XX) +1.6 (2017-06-21) ****************** * **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_) diff --git a/storages/__init__.py b/storages/__init__.py index c3b384154..6d5e09d89 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.5.2' +__version__ = '1.6' From e89db451d7e617638b5991e31df4c8de196546a6 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 21 Jun 2017 11:39:38 -0400 Subject: [PATCH 137/174] Drop support for Django 1.9 --- .travis.yml | 6 ------ setup.py | 1 - tox.ini | 2 -- 3 files changed, 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7cba94521..cbc048a28 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,12 +13,6 @@ matrix: env: TOX_ENV=py34-django18 - python: 3.5 env: TOX_ENV=py35-django18 - - python: 2.7 - env: TOX_ENV=py27-django19 - - python: 3.4 - env: TOX_ENV=py34-django19 - - python: 3.5 - env: TOX_ENV=py35-django19 - python: 2.7 env: TOX_ENV=py27-django110 - python: 3.4 diff --git a/setup.py b/setup.py index ed759e6b6..d2cd95045 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,6 @@ def get_requirements_tests(): 'Environment :: Web Environment', 'Framework :: Django', 'Framework :: Django :: 1.8', - 'Framework :: Django :: 1.9', 'Framework :: Django :: 1.10', 'Framework :: Django :: 1.11', 'Intended Audience :: Developers', diff --git a/tox.ini b/tox.ini index ba37097f0..40414722d 100644 --- a/tox.ini +++ b/tox.ini @@ -2,7 +2,6 @@ envlist = flake8 {py27,py33,py34,py35}-django18, - {py27,py34,py35}-django19 {py27,py34,py35}-django110 {py27,py34,py35,py36}-django111 @@ -14,7 +13,6 @@ setenv = DJANGO_SETTINGS_MODULE=tests.settings deps = django18: Django>=1.8, <1.9 - django19: Django>=1.9, <1.10 django110: Django>=1.10, <1.11 django111: Django>=1.11, <2.0 py27: mock From 39a2a7a8c08eadd2952cab485ea76434ae86b12f Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 21 Jun 2017 21:59:08 -0700 Subject: [PATCH 138/174] Fix safe_join to allow joining a base path with an empty string (#336) Regression introduced in 895a068fcf3f23a6b71294c5b4b67a822df6e642 --- CHANGELOG.rst | 7 ++++++- storages/utils.py | 6 +++--- tests/test_utils.py | 4 ++++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index dc67d033a..47a56982a 100644 --- a/CHANGELOG.rst +++ 
b/CHANGELOG.rst @@ -1,6 +1,12 @@ django-storages change log ========================== +1.6.1 (UNRELEASED) +****************** + +* Fix regression in ``safe_join()`` to allow joining a base path with an empty + string. + 1.6 (2017-06-21) ****************** @@ -363,4 +369,3 @@ since March 2013. .. _#89: https://bitbucket.org/david/django-storages/issue/89/112-broke-the-mosso-backend .. _pull request #5: https://bitbucket.org/david/django-storages/pull-request/5/fixed-path-bug-and-added-testcase-for - diff --git a/storages/utils.py b/storages/utils.py index 3637f6590..b1ff45d5d 100644 --- a/storages/utils.py +++ b/storages/utils.py @@ -60,15 +60,15 @@ def safe_join(base, *paths): """ base_path = force_text(base) base_path = base_path.rstrip('/') - paths = [force_text(p) for p in paths] + paths = [base_path + '/'] + [force_text(p) for p in paths if p] - final_path = posixpath.normpath(posixpath.join(base_path + '/', *paths)) + final_path = posixpath.normpath(posixpath.join(*paths)) # posixpath.normpath() strips the trailing /. Add it back. if paths[-1].endswith('/'): final_path += '/' # Ensure final_path starts with base_path and that the next character after - # the final path is /. + # the base path is /. base_path_len = len(base_path) if (not final_path.startswith(base_path) or final_path[base_path_len] != '/'): raise ValueError('the joined path is located outside of the base path' diff --git a/tests/test_utils.py b/tests/test_utils.py index af14a4cb5..d76697b4d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -90,3 +90,7 @@ def test_datetime_isoformat(self): dt = datetime.datetime(2017, 5, 19, 14, 45, 37, 123456) path = utils.safe_join('base_url', dt.isoformat()) self.assertEqual(path, 'base_url/2017-05-19T14:45:37.123456') + + def test_join_empty_string(self): + path = utils.safe_join('base_url', '') + self.assertEqual(path, 'base_url/') From b3419b333d778361c4a3f51256be61de58d85944 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 21 Jun 2017 21:59:34 -0700 Subject: [PATCH 139/174] Integrate isort and use it to consistently sort imports across files (#338) Reduces need to think and order imports, simply let the tool do it. Provides more consistent code style across Python modules by always ordering imports a single way. Use same configuration as Django project. 
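
Contributors can run the same check locally (a sketch; mirrors the new
tox environment below and assumes isort is installed):

    pip install isort
    isort --recursive --check-only --diff storages/ tests/  # report only
    isort --recursive storages/ tests/                      # fix in place
    tox -e isort                                            # or via tox
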
--- .travis.yml | 1 + setup.cfg | 9 +++++++++ setup.py | 1 + storages/backends/apache_libcloud.py | 4 ++-- storages/backends/azure_storage.py | 10 +++++----- storages/backends/dropbox.py | 9 ++++----- storages/backends/ftp.py | 6 +++--- storages/backends/gcloud.py | 3 ++- storages/backends/s3boto.py | 13 ++++++++----- storages/backends/s3boto3.py | 13 ++++++++----- storages/backends/sftpstorage.py | 4 ++-- tests/test_dropbox.py | 15 +++++++++------ tests/test_ftp.py | 2 +- tests/test_gcloud.py | 3 +-- tests/test_gs.py | 2 +- tests/test_s3boto.py | 11 +++++------ tests/test_s3boto3.py | 16 ++++++++-------- tests/test_sftp.py | 11 ++++++----- tests/test_utils.py | 3 ++- tox.ini | 5 +++++ 20 files changed, 83 insertions(+), 58 deletions(-) diff --git a/.travis.yml b/.travis.yml index cbc048a28..127b0f1c0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,7 @@ cache: pip matrix: include: - env: TOX_ENV=flake8 + - env: TOX_ENV=isort - python: 2.7 env: TOX_ENV=py27-django18 - python: 3.3 diff --git a/setup.cfg b/setup.cfg index 8431ecd5b..1a0dfbc1b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,3 +6,12 @@ exclude = .tox, docs max-line-length = 119 + +[isort] +combine_as_imports = true +default_section = THIRDPARTY +include_trailing_comma = true +known_first_party = storages +line_length = 79 +multi_line_output = 5 +not_skip = __init__.py diff --git a/setup.py b/setup.py index d2cd95045..876059f6b 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ from setuptools import setup + import storages diff --git a/storages/backends/apache_libcloud.py b/storages/backends/apache_libcloud.py index 776b8ac45..a2e5dc2e3 100644 --- a/storages/backends/apache_libcloud.py +++ b/storages/backends/apache_libcloud.py @@ -4,11 +4,11 @@ import os from django.conf import settings +from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.core.files.storage import Storage -from django.core.exceptions import ImproperlyConfigured from django.utils.deconstruct import deconstructible -from django.utils.six import string_types, BytesIO +from django.utils.six import BytesIO, string_types from django.utils.six.moves.urllib.parse import urljoin try: diff --git a/storages/backends/azure_storage.py b/storages/backends/azure_storage.py index 19494c4b2..ea5d71c3f 100644 --- a/storages/backends/azure_storage.py +++ b/storages/backends/azure_storage.py @@ -1,14 +1,16 @@ -from datetime import datetime -import os.path import mimetypes +import os.path import time +from datetime import datetime from time import mktime -from django.core.files.base import ContentFile from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible +from storages.utils import setting + try: import azure # noqa except ImportError: @@ -24,8 +26,6 @@ from azure.storage import BlobService from azure import WindowsAzureMissingResourceError as AzureMissingResourceHttpError -from storages.utils import setting - def clean_name(name): return os.path.normpath(name).replace("\\", "/") diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index a1fbfe2bc..b8512b9a1 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -11,20 +11,19 @@ from __future__ import absolute_import from datetime import datetime -from tempfile import SpooledTemporaryFile from shutil import copyfileobj +from tempfile import SpooledTemporaryFile from 
django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.core.files.storage import Storage -from django.utils.deconstruct import deconstructible from django.utils._os import safe_join - -from storages.utils import setting - +from django.utils.deconstruct import deconstructible from dropbox import Dropbox from dropbox.exceptions import ApiError +from storages.utils import setting + DATE_FORMAT = '%a, %d %b %Y %X +0000' diff --git a/storages/backends/ftp.py b/storages/backends/ftp.py index bb705a18c..0b28280ac 100644 --- a/storages/backends/ftp.py +++ b/storages/backends/ftp.py @@ -14,17 +14,17 @@ # class FTPTest(models.Model): # file = models.FileField(upload_to='a/b/c/', storage=fs) +import ftplib import os from datetime import datetime -import ftplib from django.conf import settings +from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.core.files.storage import Storage -from django.core.exceptions import ImproperlyConfigured from django.utils.deconstruct import deconstructible -from django.utils.six.moves.urllib import parse as urlparse from django.utils.six import BytesIO +from django.utils.six.moves.urllib import parse as urlparse from storages.utils import setting diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index e9c21ea07..f7d2d8ad8 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -3,9 +3,10 @@ from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.core.files.storage import Storage +from django.utils import timezone from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, smart_str -from django.utils import timezone + from storages.utils import clean_name, safe_join, setting try: diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 99520cbe3..c48ef10d0 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -1,16 +1,20 @@ -import os import mimetypes +import os from datetime import datetime from gzip import GzipFile from tempfile import SpooledTemporaryFile +from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation from django.core.files.base import File from django.core.files.storage import Storage -from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation +from django.utils import timezone as tz from django.utils.deconstruct import deconstructible -from django.utils.encoding import force_text, smart_str, filepath_to_uri, force_bytes +from django.utils.encoding import ( + filepath_to_uri, force_bytes, force_text, smart_str, +) from django.utils.six import BytesIO -from django.utils import timezone as tz + +from storages.utils import clean_name, safe_join, setting try: from boto import __version__ as boto_version @@ -22,7 +26,6 @@ raise ImproperlyConfigured("Could not load Boto's S3 bindings.\n" "See https://github.com/boto/boto") -from storages.utils import clean_name, safe_join, setting boto_version_info = tuple([int(i) for i in boto_version.split('-')[0].split('.')]) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 1a1badb1f..d00b2708e 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -1,6 +1,6 @@ +import mimetypes import os import posixpath -import mimetypes from gzip import GzipFile from tempfile import SpooledTemporaryFile @@ -8,10 +8,14 @@ from django.core.files.base import File from 
django.core.files.storage import Storage from django.utils.deconstruct import deconstructible -from django.utils.encoding import force_text, smart_text, filepath_to_uri, force_bytes -from django.utils.six.moves.urllib import parse as urlparse +from django.utils.encoding import ( + filepath_to_uri, force_bytes, force_text, smart_text, +) from django.utils.six import BytesIO -from django.utils.timezone import localtime, is_naive +from django.utils.six.moves.urllib import parse as urlparse +from django.utils.timezone import is_naive, localtime + +from storages.utils import safe_join, setting try: import boto3.session @@ -22,7 +26,6 @@ raise ImproperlyConfigured("Could not load Boto3's S3 bindings.\n" "See https://github.com/boto/boto3") -from storages.utils import setting, safe_join boto3_version_info = tuple([int(i) for i in boto3_version.split('.')]) diff --git a/storages/backends/sftpstorage.py b/storages/backends/sftpstorage.py index 340ed98e2..f07e8cf5d 100644 --- a/storages/backends/sftpstorage.py +++ b/storages/backends/sftpstorage.py @@ -1,17 +1,17 @@ -from __future__ import print_function # SFTP storage backend for Django. # Author: Brent Tubbs # License: MIT # # Modeled on the FTP storage by Rafal Jonca +from __future__ import print_function import getpass import os -import paramiko import posixpath import stat from datetime import datetime +import paramiko from django.core.files.base import File from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py index e212b0dce..ae6789659 100644 --- a/tests/test_dropbox.py +++ b/tests/test_dropbox.py @@ -1,16 +1,19 @@ import re from datetime import datetime + +from django.core.exceptions import ( + ImproperlyConfigured, SuspiciousFileOperation, +) +from django.core.files.base import ContentFile, File +from django.test import TestCase + +from storages.backends import dropbox + try: from unittest import mock except ImportError: # Python 3.2 and below import mock -from django.test import TestCase -from django.core.files.base import File, ContentFile -from django.core.exceptions import ImproperlyConfigured, \ - SuspiciousFileOperation - -from storages.backends import dropbox FILE_DATE = datetime(2015, 8, 24, 15, 6, 41) FILE_FIXTURE = { diff --git a/tests/test_ftp.py b/tests/test_ftp.py index 60164bcb7..34ae7140b 100644 --- a/tests/test_ftp.py +++ b/tests/test_ftp.py @@ -4,9 +4,9 @@ from mock import patch from datetime import datetime -from django.test import TestCase from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File +from django.test import TestCase from django.utils.six import BytesIO from storages.backends import ftp diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index a5db15d8e..ef73f3f5d 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -8,9 +8,8 @@ import datetime from django.core.files.base import ContentFile -from django.utils import timezone from django.test import TestCase - +from django.utils import timezone from google.cloud.exceptions import NotFound from google.cloud.storage.blob import Blob diff --git a/tests/test_gs.py b/tests/test_gs.py index 814fc3391..48ad71e78 100644 --- a/tests/test_gs.py +++ b/tests/test_gs.py @@ -1,5 +1,5 @@ -from django.test import TestCase from django.core.files.base import ContentFile +from django.test import TestCase from storages.backends import gs, s3boto diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index 
07a3f40f3..13fc59df6 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -5,14 +5,13 @@ import datetime -from django.test import TestCase -from django.core.files.base import ContentFile -from django.utils.six.moves.urllib import parse as urlparse -from django.utils import timezone as tz - from boto.exception import S3ResponseError from boto.s3.key import Key -from boto.utils import parse_ts, ISO8601 +from boto.utils import ISO8601, parse_ts +from django.core.files.base import ContentFile +from django.test import TestCase +from django.utils import timezone as tz +from django.utils.six.moves.urllib import parse as urlparse from storages.backends import s3boto diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 52dfda93c..855a66005 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -1,23 +1,23 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from datetime import datetime import gzip -try: - from unittest import mock -except ImportError: # Python 3.2 and below - import mock +from datetime import datetime -from django.test import TestCase +from botocore.exceptions import ClientError from django.conf import settings from django.core.files.base import ContentFile +from django.test import TestCase from django.utils.six.moves.urllib import parse as urlparse from django.utils.timezone import is_aware, utc -from botocore.exceptions import ClientError - from storages.backends import s3boto3 +try: + from unittest import mock +except ImportError: # Python 3.2 and below + import mock + class S3Boto3TestCase(TestCase): def setUp(self): diff --git a/tests/test_sftp.py b/tests/test_sftp.py index 39dd78800..754e98703 100644 --- a/tests/test_sftp.py +++ b/tests/test_sftp.py @@ -2,16 +2,17 @@ import stat from datetime import datetime +from django.core.files.base import File +from django.test import TestCase +from django.utils.six import BytesIO + +from storages.backends import sftpstorage + try: from unittest.mock import patch, MagicMock except ImportError: # Python 3.2 and below from mock import patch, MagicMock -from django.test import TestCase -from django.core.files.base import File -from django.utils.six import BytesIO -from storages.backends import sftpstorage - class SFTPStorageTest(TestCase): def setUp(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index d76697b4d..85dab2227 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,8 +1,9 @@ import datetime -from django.test import TestCase from django.conf import settings from django.core.exceptions import ImproperlyConfigured +from django.test import TestCase + from storages import utils diff --git a/tox.ini b/tox.ini index 40414722d..6768b665c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,7 @@ [tox] envlist = flake8 + isort {py27,py33,py34,py35}-django18, {py27,py34,py35}-django110 {py27,py34,py35,py36}-django111 @@ -27,3 +28,7 @@ deps = [testenv:flake8] deps = flake8 commands = flake8 + +[testenv:isort] +deps = isort +commands = isort --recursive --check-only --diff storages/ tests/ From e3f7a0a484162023cc0567d7628db60980af2273 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 21 Jun 2017 21:59:57 -0700 Subject: [PATCH 140/174] Sync dependency versions between requirements-tests.txt and tox.ini (#337) * Fixed typo dropbox>=7.21 -> dropbox>=7.2.1 * Alphabetized tox dependencies * In requirements-tests.txt, change google-cloud>=0.25.0 -> google-cloud-storage>=0.22.0 to match tox.ini --- requirements-tests.txt | 4 ++-- tox.ini | 8 ++++---- 2 files changed, 6 
insertions(+), 6 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 2637f7d13..4f6a95239 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,9 +1,9 @@ boto3>=1.2.3 boto>=2.32.0 -dropbox>=7.21 +dropbox>=7.2.1 Django>=1.8 flake8 -google-cloud>=0.25.0 +google-cloud-storage>=0.22.0 mock paramiko pytest-cov>=2.2.1 diff --git a/tox.ini b/tox.ini index 6768b665c..d3c25059c 100644 --- a/tox.ini +++ b/tox.ini @@ -17,12 +17,12 @@ deps = django110: Django>=1.10, <1.11 django111: Django>=1.11, <2.0 py27: mock - boto>=2.32.0 - pytest-cov>=2.2.1 boto3>=1.2.3 - dropbox>=3.24 - paramiko + boto>=2.32.0 + dropbox>=7.2.1 google-cloud-storage>=0.22.0 + paramiko + pytest-cov>=2.2.1 [testenv:flake8] From 4f05887a73e1600c2c5cddc6b52d6dc937d515f8 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 01:05:48 -0400 Subject: [PATCH 141/174] Update CHANGELOG --- CHANGELOG.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 47a56982a..11b0c412f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,11 +1,15 @@ django-storages change log ========================== -1.6.1 (UNRELEASED) +1.6.1 (XXXX-XX-XX) ****************** +* Drop support for Django 1.9 (`e89db45`_) * Fix regression in ``safe_join()`` to allow joining a base path with an empty - string. + string. (`#336_`) + +.. _e89db45: https://github.com/jschneier/django-storages/commit/e89db451d7e617638b5991e31df4c8de196546a6 +.. _#336: https://github.com/jschneier/django-storages/pull/217 1.6 (2017-06-21) ****************** From e516fbb3acb8996efd23dd0a9447250f56703da1 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 01:11:51 -0400 Subject: [PATCH 142/174] Release version 1.6.1 --- CHANGELOG.rst | 2 +- storages/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 11b0c412f..d2cc1eb64 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ django-storages change log ========================== -1.6.1 (XXXX-XX-XX) +1.6.1 (2017-06-22) ****************** * Drop support for Django 1.9 (`e89db45`_) diff --git a/storages/__init__.py b/storages/__init__.py index 6d5e09d89..bb64aa472 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.6' +__version__ = '1.6.1' From 39f9b8043638945e55c894e7093a59e0caaf1507 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 21 Jun 2017 21:15:33 -0700 Subject: [PATCH 143/174] Use dict comprehension and set literals Remove unnecessary lists, generators, and functions calls by using modern Python syntax: dict comprehension, set literals, dict literals. --- storages/backends/s3boto.py | 10 ++++------ storages/backends/s3boto3.py | 14 +++++++++----- tests/test_s3boto.py | 18 +++++++++--------- 3 files changed, 22 insertions(+), 20 deletions(-) diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index c48ef10d0..9a0150bc3 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -275,12 +275,10 @@ def entries(self): Get the locally cached files for the bucket. 
""" if self.preload_metadata and not self._loaded_meta: - self._entries.update( - dict( - (self._decode_name(entry.key), entry) - for entry in self.bucket.list(prefix=self.location) - ) - ) + self._entries.update({ + self._decode_name(entry.key): entry + for entry in self.bucket.list(prefix=self.location) + }) self._loaded_meta = True return self._entries diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index d00b2708e..0ce52738d 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -286,8 +286,10 @@ def entries(self): Get the locally cached files for the bucket. """ if self.preload_metadata and not self._entries: - self._entries = dict((self._decode_name(entry.key), entry) - for entry in self.bucket.objects.filter(Prefix=self.location)) + self._entries = { + self._decode_name(entry.key): entry + for entry in self.bucket.objects.filter(Prefix=self.location) + } return self._entries def _lookup_env(self, names): @@ -532,9 +534,11 @@ def _strip_signing_parameters(self, url): # from v2 and v4 signatures, regardless of the actual signature version used. split_url = urlparse.urlsplit(url) qs = urlparse.parse_qsl(split_url.query, keep_blank_values=True) - blacklist = set(['x-amz-algorithm', 'x-amz-credential', 'x-amz-date', - 'x-amz-expires', 'x-amz-signedheaders', 'x-amz-signature', - 'x-amz-security-token', 'awsaccesskeyid', 'expires', 'signature']) + blacklist = { + 'x-amz-algorithm', 'x-amz-credential', 'x-amz-date', + 'x-amz-expires', 'x-amz-signedheaders', 'x-amz-signature', + 'x-amz-security-token', 'awsaccesskeyid', 'expires', 'signature', + } filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist) # Note: Parameters that did not have a value in the original query string will have # an '=' sign appended to it, e.g ?foo&bar becomes ?foo=&bar= diff --git a/tests/test_s3boto.py b/tests/test_s3boto.py index 13fc59df6..0457cc506 100644 --- a/tests/test_s3boto.py +++ b/tests/test_s3boto.py @@ -221,15 +221,15 @@ def test_storage_url(self): url = 'http://aws.amazon.com/%s' % name self.storage.connection.generate_url.return_value = url - kwargs = dict( - method='GET', - bucket=self.storage.bucket.name, - key=name, - query_auth=self.storage.querystring_auth, - force_http=not self.storage.secure_urls, - headers=None, - response_headers=None, - ) + kwargs = { + 'method': 'GET', + 'bucket': self.storage.bucket.name, + 'key': name, + 'query_auth': self.storage.querystring_auth, + 'force_http': not self.storage.secure_urls, + 'headers': None, + 'response_headers': None, + } self.assertEqual(self.storage.url(name), url) self.storage.connection.generate_url.assert_called_with( From 112ae43b076379f2a75e62f56f44ffb3677a2d5e Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Thu, 22 Jun 2017 12:16:30 -0700 Subject: [PATCH 144/174] Fix safe_join to handle trailing slash with intermediate paths (#341) Regression introduced in 895a068fcf3f23a6b71294c5b4b67a822df6e642 Partially reverts 39a2a7a8c08eadd2952cab485ea76434ae86b12f --- CHANGELOG.rst | 6 ++++++ storages/utils.py | 15 ++++++++++----- tests/test_utils.py | 20 ++++++++++++++++++++ 3 files changed, 36 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d2cc1eb64..9681f7466 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,12 @@ django-storages change log ========================== +1.6.2 (UNRELEASED) +****************** + +* Fix regression in ``safe_join()`` to handle a trailing slash in an + intermediate path. 
+ 1.6.1 (2017-06-22) ****************** diff --git a/storages/utils.py b/storages/utils.py index b1ff45d5d..566aa5127 100644 --- a/storages/utils.py +++ b/storages/utils.py @@ -60,11 +60,16 @@ def safe_join(base, *paths): """ base_path = force_text(base) base_path = base_path.rstrip('/') - paths = [base_path + '/'] + [force_text(p) for p in paths if p] - - final_path = posixpath.normpath(posixpath.join(*paths)) - # posixpath.normpath() strips the trailing /. Add it back. - if paths[-1].endswith('/'): + paths = [force_text(p) for p in paths] + + final_path = base_path + '/' + for path in paths: + _final_path = posixpath.normpath(posixpath.join(final_path, path)) + # posixpath.normpath() strips the trailing /. Add it back. + if path.endswith('/') or _final_path + '/' == final_path: + _final_path += '/' + final_path = _final_path + if final_path == base_path: final_path += '/' # Ensure final_path starts with base_path and that the next character after diff --git a/tests/test_utils.py b/tests/test_utils.py index 85dab2227..eb309acd9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -59,6 +59,10 @@ def test_with_dot(self): ".", "to/./somewhere") self.assertEqual(path, "path/to/somewhere") + def test_with_only_dot(self): + path = utils.safe_join("", ".") + self.assertEqual(path, "") + def test_base_url(self): path = utils.safe_join("base_url", "path/to/somewhere") self.assertEqual(path, "base_url/path/to/somewhere") @@ -95,3 +99,19 @@ def test_datetime_isoformat(self): def test_join_empty_string(self): path = utils.safe_join('base_url', '') self.assertEqual(path, 'base_url/') + + def test_with_base_url_and_dot(self): + path = utils.safe_join('base_url', '.') + self.assertEqual(path, 'base_url/') + + def test_with_base_url_and_dot_and_path_and_slash(self): + path = utils.safe_join('base_url', '.', 'path/to/', '.') + self.assertEqual(path, 'base_url/path/to/') + + def test_join_nothing(self): + path = utils.safe_join('') + self.assertEqual(path, '') + + def test_with_base_url_join_nothing(self): + path = utils.safe_join('base_url') + self.assertEqual(path, 'base_url/') From aa0cd4455e5f76f83f55b0265d8c21168e91921c Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 15:26:11 -0400 Subject: [PATCH 145/174] Fix GSBotoStorage connection kwargs (#342) --- storages/backends/gs.py | 5 +++++ storages/backends/s3boto.py | 21 ++++++++++++++------- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/storages/backends/gs.py b/storages/backends/gs.py index 05142f7f0..38256d002 100644 --- a/storages/backends/gs.py +++ b/storages/backends/gs.py @@ -80,6 +80,11 @@ class GSBotoStorage(S3BotoStorage): url_protocol = setting('GS_URL_PROTOCOL', 'http:') host = setting('GS_HOST', GSConnection.DefaultHost) + def _get_connection_kwargs(self): + kwargs = super(GSBotoStorage, self)._get_connection_kwargs() + del kwargs['security_token'] + return kwargs + def _save_content(self, key, content, headers): # only pass backwards incompatible arguments if they vary from the default options = {} diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py index 9a0150bc3..981faa441 100644 --- a/storages/backends/s3boto.py +++ b/storages/backends/s3boto.py @@ -246,19 +246,26 @@ def __init__(self, acl=None, bucket=None, **settings): @property def connection(self): if self._connection is None: + kwargs = self._get_connection_kwargs() + self._connection = self.connection_class( self.access_key, self.secret_key, - security_token=self.security_token, - is_secure=self.use_ssl, - 
calling_format=self.calling_format, - host=self.host, - port=self.port, - proxy=self.proxy, - proxy_port=self.proxy_port + **kwargs ) return self._connection + def _get_connection_kwargs(self): + return dict( + security_token=self.security_token, + is_secure=self.use_ssl, + calling_format=self.calling_format, + host=self.host, + port=self.port, + proxy=self.proxy, + proxy_port=self.proxy_port + ) + @property def bucket(self): """ From 98556abc0d9fbf3ec4e06e13034410decafa042c Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 15:36:19 -0400 Subject: [PATCH 146/174] Update CHANGELOG for #342 --- CHANGELOG.rst | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9681f7466..1790f9ac3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,11 +1,16 @@ django-storages change log ========================== -1.6.2 (UNRELEASED) +1.6.2 (XXXX-XX-XX) ****************** * Fix regression in ``safe_join()`` to handle a trailing slash in an - intermediate path. + intermediate path. (`#341`_) +* Fix regression in ``gs.GSBotoStorage`` got an unespected kwarg. + (`#342`_) + +.. _#341: https://github.com/jschneier/django-storages/pull/341 +.. _#342: https://github.com/jschneier/django-storages/pull/342 1.6.1 (2017-06-22) ****************** @@ -15,7 +20,7 @@ django-storages change log string. (`#336_`) .. _e89db45: https://github.com/jschneier/django-storages/commit/e89db451d7e617638b5991e31df4c8de196546a6 -.. _#336: https://github.com/jschneier/django-storages/pull/217 +.. _#336: https://github.com/jschneier/django-storages/pull/336 1.6 (2017-06-21) ****************** From c5c05192931b2a5a87eabd58c6ad520121ac7f1f Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 15:42:59 -0400 Subject: [PATCH 147/174] Release version 1.6.2 --- CHANGELOG.rst | 2 +- storages/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 1790f9ac3..0e7a5f495 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ django-storages change log ========================== -1.6.2 (XXXX-XX-XX) +1.6.2 (2017-06-22) ****************** * Fix regression in ``safe_join()`` to handle a trailing slash in an diff --git a/storages/__init__.py b/storages/__init__.py index bb64aa472..4a9b97884 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.6.1' +__version__ = '1.6.2' From 309f06648954c1915370fe5be5b5929633bf6459 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 15:46:26 -0400 Subject: [PATCH 148/174] Fix CHANGELOG link --- CHANGELOG.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 0e7a5f495..f49fd50bc 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -17,7 +17,7 @@ django-storages change log * Drop support for Django 1.9 (`e89db45`_) * Fix regression in ``safe_join()`` to allow joining a base path with an empty - string. (`#336_`) + string. (`#336`_) .. _e89db45: https://github.com/jschneier/django-storages/commit/e89db451d7e617638b5991e31df4c8de196546a6 .. 
_#336: https://github.com/jschneier/django-storages/pull/336 From f6c615b0786bf3d38979d25f3f7cdd8ffdf97792 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 22 Jun 2017 15:48:19 -0400 Subject: [PATCH 149/174] Remove the rest of the thanks noise --- CHANGELOG.rst | 52 +++++++++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f49fd50bc..70b293ab2 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -71,12 +71,12 @@ django-storages change log 1.5.2 (2017-01-13) ****************** -* Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_ thanks @jbittel) -* Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment (`#238`_ thanks @jdufresne) +* Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_) +* Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment (`#238`_) * Fix trying to localize a naive datetime when ``settings.USE_TZ`` is ``False`` in ``S3Boto3Storage.modified_time``. - (thanks to @tomchuk and @piglei for the reports and the patches, `#235`_, `#234`_) -* Fix automatic bucket creation in ``S3Boto3Storage`` when ``AWS_AUTO_CREATE_BUCKET`` is ``True`` (`#196`_ thanks @linuxlewis) -* Improve the documentation for the S3 backends (thanks to various contributors!) + (`#235`_, `#234`_) +* Fix automatic bucket creation in ``S3Boto3Storage`` when ``AWS_AUTO_CREATE_BUCKET`` is ``True`` (`#196`_) +* Improve the documentation for the S3 backends .. _#204: https://github.com/jschneier/django-storages/pull/204 .. _#238: https://github.com/jschneier/django-storages/pull/238 @@ -94,8 +94,8 @@ django-storages change log a stable output for gzipped files * Use ``.putfileobj`` instead of ``.put`` in ``S3Boto3Storage`` to use the transfer manager, allowing files greater than 5GB to be put on S3 (`#194`_ , `#201`_) -* Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) thanks @JshWright -* Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_, `#190`_) thanks @leonsmith +* Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) +* Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_, `#190`_) .. _issue #202: https://github.com/jschneier/django-storages/issues/202 .. _#201: https://github.com/jschneier/django-storages/pull/201 @@ -108,13 +108,13 @@ django-storages change log 1.5.0 (2016-08-02) ****************** -* Add new backend ``S3Boto3Storage`` (`#179`_) MASSIVE THANKS @mbarrien -* Add a `strict` option to `utils.setting` (`#176`_) thanks @ZuluPro -* Tests, documentation, fixing ``.close`` for ``SFTPStorage`` (`#177`_) thanks @ZuluPro -* Tests, documentation, add `.readlines` for ``FTPStorage`` (`#175`_) thanks @ZuluPro -* Tests and documentation for ``DropBoxStorage`` (`#174`_) thanks @ZuluPro -* Fix ``MANIFEST.in`` to not ship ``.pyc`` files. (`#145`_) thanks @fladi -* Enable CI testing of Python 3.5 and fix test failure from api change (`#171`_) thanks @tnir +* Add new backend ``S3Boto3Storage`` (`#179`_) +* Add a `strict` option to `utils.setting` (`#176`_) +* Tests, documentation, fixing ``.close`` for ``SFTPStorage`` (`#177`_) +* Tests, documentation, add `.readlines` for ``FTPStorage`` (`#175`_) +* Tests and documentation for ``DropBoxStorage`` (`#174`_) +* Fix ``MANIFEST.in`` to not ship ``.pyc`` files. 
(`#145`_) +* Enable CI testing of Python 3.5 and fix test failure from api change (`#171`_) .. _#145: https://github.com/jschneier/django-storages/pull/145 .. _#171: https://github.com/jschneier/django-storages/pull/171 @@ -129,10 +129,10 @@ django-storages change log * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in the ``s3boto`` backend. Compressable types such as ``application/javascript`` will still be gzipped. - PR `#122`_ thanks @cambonf -* Fix ``DropBoxStorage.exists`` check and add ``DropBoxStorage.url`` (`#127`_) thanks @zuck + PR `#122`_ +* Fix ``DropBoxStorage.exists`` check and add ``DropBoxStorage.url`` (`#127`_) * Add ``GS_HOST`` setting (with a default of ``GSConnection.DefaultHost``) to fix ``GSBotoStorage``. - Issue `#124`_. Fixed in `#125`_. Thanks @patgmiller @dcgoss. + Issue `#124`_. Fixed in `#125`_. .. _#122: https://github.com/jschneier/django-storages/pull/122 .. _#127: https://github.com/jschneier/django-storages/pull/127 @@ -148,10 +148,10 @@ django-storages change log 1.3.2 (2016-01-26) ****************** -* Fix memory leak from not closing underlying temp file in ``s3boto`` backend (`#106`_) thanks @kmmbvnr -* Allow easily specifying a custom expiry time when generating a url for ``S3BotoStorage`` (`#96`_) thanks @mattbriancon +* Fix memory leak from not closing underlying temp file in ``s3boto`` backend (`#106`_) +* Allow easily specifying a custom expiry time when generating a url for ``S3BotoStorage`` (`#96`_) * Check for bucket existence when the empty path ('') is passed to ``storage.exists`` in ``S3BotoStorage`` - - this prevents a crash when running ``collectstatic -c`` on Django 1.9.1 (`#112`_) fixed in `#116`_ thanks @xblitz + this prevents a crash when running ``collectstatic -c`` on Django 1.9.1 (`#112`_) fixed in `#116`_ .. _#106: https://github.com/jschneier/django-storages/pull/106 .. _#96: https://github.com/jschneier/django-storages/pull/96 @@ -162,12 +162,12 @@ django-storages change log 1.3.1 (2016-01-12) ****************** -* A few Azure Storage fixes [pass the content-type to Azure, handle chunked content, fix ``url``] (`#45`__) thanks @erlingbo -* Add support for a Dropbox (``dropbox``) storage backend, thanks @ZuluPro (`#76`_) +* A few Azure Storage fixes [pass the content-type to Azure, handle chunked content, fix ``url``] (`#45`__) +* Add support for a Dropbox (``dropbox``) storage backend * Various fixes to the ``apache_libcloud`` backend [return the number of bytes asked for by ``.read``, make ``.name`` non-private, don't - initialize to an empty ``BytesIO`` object] thanks @kaedroho (`#55`_) -* Fix multi-part uploads in ``s3boto`` backend not respecting ``AWS_S3_ENCRYPTION`` (`#94`_) thanks @andersontep -* Automatically gzip svg files thanks @comandrei (`#100`_) + initialize to an empty ``BytesIO`` object] (`#55`_) +* Fix multi-part uploads in ``s3boto`` backend not respecting ``AWS_S3_ENCRYPTION`` (`#94`_) +* Automatically gzip svg files (`#100`_) .. __: https://github.com/jschneier/django-storages/pull/45 .. 
_#76: https://github.com/jschneier/django-storages/pull/76 @@ -185,7 +185,7 @@ django-storages change log * Add support for Django 1.8+ (`#36`__) * Add ``AWS_S3_PROXY_HOST`` and ``AWS_S3_PROXY_PORT`` settings for s3boto backend (`#41`_) * Fix Python3K compat issue in apache_libcloud (`#52`_) -* Fix Google Storage backend not respecting ``GS_IS_GZIPPED`` setting (`#51`__, `#60`_) thanks @stmos +* Fix Google Storage backend not respecting ``GS_IS_GZIPPED`` setting (`#51`__, `#60`_) * Rename FTP ``_name`` attribute to ``name`` which is what the Django ``File`` api is expecting (`#70`_) * Put ``StorageMixin`` first in inheritance to maintain backwards compat with older versions of Django (`#63`_) From 744339fbf493a1af76e11df90c3724313f6b70c8 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 23 Jun 2017 00:59:58 -0400 Subject: [PATCH 150/174] Revert default AWS_S3_SIGNATURE_VERSION (#344) --- docs/backends/amazon-S3.rst | 39 +++++++++++++++++++++++------------- storages/backends/s3boto3.py | 2 +- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index 7689b6ce6..a56ab7949 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -14,9 +14,11 @@ for the forseeable future. For historical completeness an extreme legacy backend was removed in version 1.2 -If using the boto backend it is recommended that you configure it to also use -`AWS Signature Version 4`_. This can be done by adding ``S3_USE_SIGV4 = True`` to -your settings and setting the ``AWS_S3_HOST`` configuration option. +If using the boto backend on a new project (not recommended) it is recommended +that you configure it to also use `AWS Signature Version 4`_. This can be done +by adding ``S3_USE_SIGV4 = True`` to your settings and setting the ``AWS_S3_HOST`` +configuration option. For regions created after January 2014 this is your only +option if you insist on using the boto backend. Settings -------- @@ -53,7 +55,6 @@ Available are numerous settings. It should be especially noted the following: ``AWS_HEADERS`` (optional - boto only, for boto3 see ``AWS_S3_OBJECT_PARAMETERS``) If you'd like to set headers sent with each file of the storage:: - # see http://developer.yahoo.com/performance/rules.html#expires AWS_HEADERS = { 'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT', 'Cache-Control': 'max-age=86400', @@ -67,9 +68,9 @@ Available are numerous settings. It should be especially noted the following: } ``AWS_QUERYSTRING_AUTH`` (optional; default is ``True``) - Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` removes `query parameter - authentication`_ from generated URLs. This can be useful if your S3 buckets are - public. + Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` to remove query parameter + authentication from generated URLs. This can be useful if your S3 buckets + are public. ``AWS_QUERYSTRING_EXPIRE`` (optional; default is 3600 seconds) The number of seconds that a generated URL is valid for. @@ -83,8 +84,13 @@ Available are numerous settings. It should be especially noted the following: ``AWS_S3_HOST`` (optional - boto only, default is ``s3.amazonaws.com``) To ensure you use `AWS Signature Version 4`_ it is recommended to set this to the host of your bucket. See the - `mapping of region to endpoint names`_ to figure out the appropriate endpoint for your bucket. Also be sure to - add ``S3_USE_SIGV4 = True`` to settings.py + `S3 region list`_ to figure out the appropriate endpoint for your bucket. 
Also be sure to add + ``S3_USE_SIGV4 = True`` to settings.py + + .. note:: + + The signature versions are not backwards compatible so be careful about url endpoints if making this change + for legacy projects. ``AWS_LOCATION`` (optional: default is `''`) A path prefix that will be prepended to all uploads @@ -107,14 +113,19 @@ Available are numerous settings. It should be especially noted the following: ``AWS_S3_CALLING_FORMAT`` (optional: default is ``SubdomainCallingFormat()``) Defines the S3 calling format to use to connect to the static bucket. -``AWS_S3_SIGNATURE_VERSION`` (optional - boto3 only: default is ``s3v4``) +``AWS_S3_SIGNATURE_VERSION`` (optional - boto3 only) + + All AWS regions support v4 of the signing protocol. To use it set this to ``'s3v4'``. It is recommended + to do this for all new projects and required for all regions launched after January 2014. To see + if your region is one of them you can view the `S3 region list`_. + + .. note:: - All AWS regions support the v4 version of the signing protocol. To use the legacy v2 set this to ``'s3'``. Some non-Amazon S3 - implementations might require this change. + The signature versions are not backwards compatible so be careful about url endpoints if making this change + for legacy projects. -.. _query parameter authentication: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html .. _AWS Signature Version 4: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html -.. _mapping of region to endpoint names: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region +.. _S3 region list: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region CloudFront ~~~~~~~~~~ diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 0ce52738d..c85a8a49a 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -191,7 +191,7 @@ class S3Boto3Storage(Storage): bucket_acl = setting('AWS_BUCKET_ACL', default_acl) querystring_auth = setting('AWS_QUERYSTRING_AUTH', True) querystring_expire = setting('AWS_QUERYSTRING_EXPIRE', 3600) - signature_version = setting('AWS_S3_SIGNATURE_VERSION', 's3v4') + signature_version = setting('AWS_S3_SIGNATURE_VERSION') reduced_redundancy = setting('AWS_REDUCED_REDUNDANCY', False) location = setting('AWS_LOCATION', '') encryption = setting('AWS_S3_ENCRYPTION', False) From fe0ee5324a2a16dfc490e57b61c61aa343fde151 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 23 Jun 2017 01:01:07 -0400 Subject: [PATCH 151/174] Update CHANGELOG for #344 --- CHANGELOG.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 70b293ab2..77f1f4999 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,15 @@ django-storages change log ========================== +1.6.3 (XXXX-XX-XX) +****************** + +* Revert default ``AWS_S3_SIGNATURE_VERSION`` to be V2 to restore backwards + compatability in ``S3Boto3``. It's recommended that all new projects set + this to be ``'s3v4'``. (`#344`_) + +.. 
_#344: https://github.com/jschneier/django-storages/pull/344 + 1.6.2 (2017-06-22) ****************** From ef2c0e3b520fa36757c1186f8ded696a0ffadb25 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 23 Jun 2017 01:01:29 -0400 Subject: [PATCH 152/174] Release version 1.6.3 --- CHANGELOG.rst | 2 +- storages/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 77f1f4999..5fcd68220 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ django-storages change log ========================== -1.6.3 (XXXX-XX-XX) +1.6.3 (2017-06-23) ****************** * Revert default ``AWS_S3_SIGNATURE_VERSION`` to be V2 to restore backwards diff --git a/storages/__init__.py b/storages/__init__.py index 4a9b97884..4574cc897 100644 --- a/storages/__init__.py +++ b/storages/__init__.py @@ -1 +1 @@ -__version__ = '1.6.2' +__version__ = '1.6.3' From e10132920a40b76bed0ee16482e81e50465f4662 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Fri, 23 Jun 2017 05:56:14 -0700 Subject: [PATCH 153/174] Consolidate flake8 & isort tox envs to a single lint env Fall under the general umbrella of linting the project. Run all linting tools as one stage of testing. --- .travis.yml | 3 +-- tox.ini | 19 +++++++++---------- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/.travis.yml b/.travis.yml index 127b0f1c0..0f03d69ff 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,8 +4,7 @@ cache: pip matrix: include: - - env: TOX_ENV=flake8 - - env: TOX_ENV=isort + - env: TOX_ENV=lint - python: 2.7 env: TOX_ENV=py27-django18 - python: 3.3 diff --git a/tox.ini b/tox.ini index d3c25059c..19da7c691 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,7 @@ [tox] envlist = - flake8 - isort - {py27,py33,py34,py35}-django18, + lint + {py27,py33,py34,py35}-django18 {py27,py34,py35}-django110 {py27,py34,py35,py36}-django111 @@ -25,10 +24,10 @@ deps = pytest-cov>=2.2.1 -[testenv:flake8] -deps = flake8 -commands = flake8 - -[testenv:isort] -deps = isort -commands = isort --recursive --check-only --diff storages/ tests/ +[testenv:lint] +deps = + flake8 + isort +commands = + flake8 + isort --recursive --check-only --diff storages/ tests/ From 51f4b2617d05c7e23f7b12146ef9bb987207ce2e Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sat, 24 Jun 2017 17:01:32 -0700 Subject: [PATCH 154/174] Fix Dropbox tests to work with version 8.0.0 (#347) The client module was removed from the library. 
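For reference, a minimal sketch of what the tests now exercise against
the 8.0.0 API (the 'foo' token is the same dummy value the tests pass
to DropBoxStorage; nothing here hits the network):

    from dropbox import Dropbox

    # dropbox>=8.0.0 exposes the client directly as dropbox.Dropbox;
    # the dropbox.client module (whose OAuth2 token pattern and session
    # object the old setUp() had to patch) no longer exists, so a
    # placeholder token can be used as-is when constructing the client.
    dbx = Dropbox('foo')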
--- requirements-tests.txt | 2 +- tests/test_dropbox.py | 4 ---- tox.ini | 2 +- 3 files changed, 2 insertions(+), 6 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 4f6a95239..229a8a288 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,6 +1,6 @@ boto3>=1.2.3 boto>=2.32.0 -dropbox>=7.2.1 +dropbox>=8.0.0 Django>=1.8 flake8 google-cloud-storage>=0.22.0 diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py index ae6789659..ad581a5dd 100644 --- a/tests/test_dropbox.py +++ b/tests/test_dropbox.py @@ -1,4 +1,3 @@ -import re from datetime import datetime from django.core.exceptions import ( @@ -60,9 +59,6 @@ class DropBoxTest(TestCase): - @mock.patch('dropbox.client._OAUTH2_ACCESS_TOKEN_PATTERN', - re.compile(r'.*')) - @mock.patch('dropbox.client.DropboxOAuth2Session') def setUp(self, *args): self.storage = dropbox.DropBoxStorage('foo') diff --git a/tox.ini b/tox.ini index 19da7c691..05dc3d145 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ deps = py27: mock boto3>=1.2.3 boto>=2.32.0 - dropbox>=7.2.1 + dropbox>=8.0.0 google-cloud-storage>=0.22.0 paramiko pytest-cov>=2.2.1 From 9dab0c6d2875c62125d254208f246f03391f1f91 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sat, 24 Jun 2017 09:22:52 -0700 Subject: [PATCH 155/174] Simplify S3Boto3Storage.exists() by using boto3's head_object() Avoids unnecessary API requests to fetch and create the bucket when simply checking if a key exists. https://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Client.head_object --- storages/backends/s3boto3.py | 9 +-------- tests/test_s3boto3.py | 24 +++++++++++++++++------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index c85a8a49a..6af5e8f7b 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -455,18 +455,11 @@ def delete(self, name): self.bucket.Object(self._encode_name(name)).delete() def exists(self, name): - if not name: - try: - self.bucket - return True - except ImproperlyConfigured: - return False name = self._normalize_name(self._clean_name(name)) if self.entries: return name in self.entries - obj = self.bucket.Object(self._encode_name(name)) try: - obj.load() + self.connection.meta.client.head_object(Bucket=self.bucket_name, Key=name) return True except self.connection_response_error: return False diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 855a66005..7b239900f 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -189,17 +189,27 @@ def test_auto_creating_bucket(self): ) def test_storage_exists(self): - obj = self.storage.bucket.Object.return_value self.assertTrue(self.storage.exists("file.txt")) - self.storage.bucket.Object.assert_called_with("file.txt") - obj.load.assert_called_with() + self.storage.connection.meta.client.head_object.assert_called_with( + Bucket=self.storage.bucket_name, + Key="file.txt", + ) def test_storage_exists_false(self): - obj = self.storage.bucket.Object.return_value - obj.load.side_effect = ClientError({'Error': {'Code': 123, 'Message': 'Fake'}}, 'load') + self.storage.connection.meta.client.head_object.side_effect = ClientError( + {'Error': {'Code': '404', 'Message': 'Not Found'}}, + 'HeadObject', + ) self.assertFalse(self.storage.exists("file.txt")) - self.storage.bucket.Object.assert_called_with("file.txt") - obj.load.assert_called_with() + self.storage.connection.meta.client.head_object.assert_called_with( + Bucket=self.storage.bucket_name, + Key='file.txt', + ) 
+ + def test_storage_exists_doesnt_create_bucket(self): + with mock.patch.object(self.storage, '_get_or_create_bucket') as method: + self.storage.exists('file.txt') + method.assert_not_called() def test_storage_delete(self): self.storage.delete("path/to/file.txt") From b5027d26faebbb601183faa009f7b3e3ad58243e Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Mon, 26 Jun 2017 06:47:43 -0700 Subject: [PATCH 156/174] Remove unnecessary indirection in s3boto3 backend The indirection was necessary for the older s3boto backend as it shared code with the (now deprecated) GS backend. The s3boto3 does not share code in this way. When the s3boto3 was developed, the indirection was carried over, but it is no longer necessary. To simplify the code, remove it. Other backends do not encode this style of indirection. --- storages/backends/s3boto3.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 6af5e8f7b..741bf3333 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -169,10 +169,7 @@ class S3Boto3Storage(Storage): mode and supports streaming(buffering) data in chunks to S3 when writing. """ - connection_service_name = 's3' default_content_type = 'application/octet-stream' - connection_response_error = ClientError - file_class = S3Boto3StorageFile # If config provided in init, signature_version and addressing_style settings/args are ignored. config = None @@ -259,7 +256,7 @@ def connection(self): if self._connection is None: session = boto3.session.Session() self._connection = session.resource( - self.connection_service_name, + 's3', aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key, aws_session_token=self.security_token, @@ -322,7 +319,7 @@ def _get_or_create_bucket(self, name): # Directly call head_bucket instead of bucket.load() because head_bucket() # fails on wrong region, while bucket.load() does not. bucket.meta.client.head_bucket(Bucket=name) - except self.connection_response_error as err: + except ClientError as err: if err.response['ResponseMetadata']['HTTPStatusCode'] == 301: raise ImproperlyConfigured("Bucket %s exists, but in a different " "region than we are connecting to. 
Set " @@ -403,8 +400,8 @@ def _compress_content(self, content): def _open(self, name, mode='rb'): name = self._normalize_name(self._clean_name(name)) try: - f = self.file_class(name, mode, self) - except self.connection_response_error as err: + f = S3Boto3StorageFile(name, mode, self) + except ClientError as err: if err.response['ResponseMetadata']['HTTPStatusCode'] == 404: raise IOError('File does not exist: %s' % name) raise # Let it bubble up if it was some other error @@ -461,7 +458,7 @@ def exists(self, name): try: self.connection.meta.client.head_object(Bucket=self.bucket_name, Key=name) return True - except self.connection_response_error: + except ClientError: return False def listdir(self, name): From d96107d809ec685193e96c0ff278d6e725c2f51f Mon Sep 17 00:00:00 2001 From: Filip Vavera Date: Wed, 17 May 2017 08:35:07 +0200 Subject: [PATCH 157/174] Add MIME types to gcloud storage --- .gitignore | 3 ++- AUTHORS | 1 + storages/backends/gcloud.py | 7 +++++-- tests/test_gcloud.py | 8 +++++--- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 1da610f45..ea222dadc 100644 --- a/.gitignore +++ b/.gitignore @@ -22,4 +22,5 @@ setuptools* __pycache__ .coverage .cache -.idea \ No newline at end of file + +.idea/ diff --git a/AUTHORS b/AUTHORS index 6bd343522..602fdae67 100644 --- a/AUTHORS +++ b/AUTHORS @@ -30,6 +30,7 @@ By order of apparition, thanks: * Eirik Martiniussen Sylliaas (Google Cloud Storage native support) * Jody McIntyre (Google Cloud Storage native support) * Stanislav Kaledin (Bug fixes in SFTPStorage) + * Filip Vavera (Google Cloud MIME types support) Extra thanks to Marty for adding this in Django, you can buy his very interesting book (Pro Django). diff --git a/storages/backends/gcloud.py b/storages/backends/gcloud.py index f7d2d8ad8..6b433c602 100644 --- a/storages/backends/gcloud.py +++ b/storages/backends/gcloud.py @@ -1,3 +1,4 @@ +import mimetypes from tempfile import SpooledTemporaryFile from django.core.exceptions import ImproperlyConfigured @@ -21,6 +22,7 @@ class GoogleCloudFile(File): def __init__(self, name, mode, storage): self.name = name + self.mime_type = mimetypes.guess_type(name)[0] self._mode = mode self._storage = storage self.blob = storage.bucket.get_blob(name) @@ -70,7 +72,7 @@ def close(self): if self._file is not None: if self._is_dirty: self.file.seek(0) - self.blob.upload_from_file(self.file) + self.blob.upload_from_file(self.file, content_type=self.mime_type) self._file.close() self._file = None @@ -154,7 +156,8 @@ def _save(self, name, content): content.name = cleaned_name encoded_name = self._encode_name(name) file = GoogleCloudFile(encoded_name, 'rw', self) - file.blob.upload_from_file(content, size=content.size) + file.blob.upload_from_file(content, size=content.size, + content_type=file.mime_type) return cleaned_name def delete(self, name): diff --git a/tests/test_gcloud.py b/tests/test_gcloud.py index ef73f3f5d..e1c4cb603 100644 --- a/tests/test_gcloud.py +++ b/tests/test_gcloud.py @@ -6,6 +6,7 @@ import mock import datetime +import mimetypes from django.core.files.base import ContentFile from django.test import TestCase @@ -90,7 +91,8 @@ def test_open_write(self, MockBlob): # File data is not actually written until close(), so do that. f.close() - MockBlob().upload_from_file.assert_called_with(tmpfile) + MockBlob().upload_from_file.assert_called_with( + tmpfile, content_type=mimetypes.guess_type(self.filename)[0]) def test_save(self): data = 'This is some test content.' 
@@ -100,7 +102,7 @@ def test_save(self): self.storage._client.get_bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob().upload_from_file.assert_called_with( - content, size=len(data)) + content, size=len(data), content_type=mimetypes.guess_type(self.filename)[0]) def test_save2(self): data = 'This is some test ủⓝï℅ⅆℇ content.' @@ -111,7 +113,7 @@ def test_save2(self): self.storage._client.get_bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob().upload_from_file.assert_called_with( - content, size=len(data)) + content, size=len(data), content_type=mimetypes.guess_type(filename)[0]) def test_delete(self): self.storage.delete(self.filename) From 609c1abbd7e4fb29251afdc9983deb91f21ad850 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 7 Jul 2017 02:01:09 -0400 Subject: [PATCH 158/174] Fix DropBoxBackend.url (#357) --- CHANGELOG.rst | 7 +++++++ storages/backends/dropbox.py | 2 +- tests/test_dropbox.py | 12 +++++++----- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5fcd68220..17ab7db20 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,13 @@ django-storages change log ========================== +1.6.4 (XXXX-XX-XX) +****************** + +* Fix ``DropBoxStorage.url`` to work. (`#357`_) + +.. _#357: https://github.com/jschneier/django-storages/pull/357 + 1.6.3 (2017-06-23) ****************** diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py index b8512b9a1..e520b3886 100644 --- a/storages/backends/dropbox.py +++ b/storages/backends/dropbox.py @@ -101,7 +101,7 @@ def accessed_time(self, name): def url(self, name): media = self.client.files_get_temporary_link(self._full_path(name)) - return media['link'] + return media.link def _open(self, name, mode='rb'): remote_file = DropBoxFile(self._full_path(name), self) diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py index ad581a5dd..342a7d042 100644 --- a/tests/test_dropbox.py +++ b/tests/test_dropbox.py @@ -14,6 +14,10 @@ import mock +class F(object): + pass + + FILE_DATE = datetime(2015, 8, 24, 15, 6, 41) FILE_FIXTURE = { 'bytes': 4, @@ -52,10 +56,8 @@ 'size': '0 bytes', 'thumb_exists': False } -FILE_MEDIA_FIXTURE = { - 'link': 'https://dl.dropboxusercontent.com/1/view/foo', - 'expires': 'Fri, 16 Sep 2011 01:01:25 +0000', -} +FILE_MEDIA_FIXTURE = F() +FILE_MEDIA_FIXTURE.link = 'https://dl.dropboxusercontent.com/1/view/foo' class DropBoxTest(TestCase): @@ -123,7 +125,7 @@ def test_save(self, *args): return_value=FILE_MEDIA_FIXTURE) def test_url(self, *args): url = self.storage.url('foo') - self.assertEqual(url, FILE_MEDIA_FIXTURE['link']) + self.assertEqual(url, FILE_MEDIA_FIXTURE.link) def test_formats(self, *args): self.storage = dropbox.DropBoxStorage('foo') From 68eae0bd39b1f7e9267746da14ddd9f698696b57 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 7 Jul 2017 13:17:25 -0400 Subject: [PATCH 159/174] Update CHANGELOG for #320 --- CHANGELOG.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 17ab7db20..e319dfa31 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,8 +4,10 @@ django-storages change log 1.6.4 (XXXX-XX-XX) ****************** +* Files uploaded with ``GoogleCloudStorage`` will now set their appropriate mimetype (`#320`_) * Fix ``DropBoxStorage.url`` to work. (`#357`_) +.. _#320: https://github.com/jschneier/django-storages/pull/320 .. 
_#357: https://github.com/jschneier/django-storages/pull/357 1.6.3 (2017-06-23) From e88dd984e7c295c7bd87166f4f60dd0d6861c327 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Fri, 7 Jul 2017 13:18:20 -0400 Subject: [PATCH 160/174] Fix some typos --- CHANGELOG.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e319dfa31..ebad09ea6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -13,7 +13,7 @@ django-storages change log 1.6.3 (2017-06-23) ****************** -* Revert default ``AWS_S3_SIGNATURE_VERSION`` to be V2 to restore backwards +* Revert default ``AWS_S3_SIGNATURE_VERSION`` to V2 to restore backwards compatability in ``S3Boto3``. It's recommended that all new projects set this to be ``'s3v4'``. (`#344`_) @@ -24,7 +24,7 @@ django-storages change log * Fix regression in ``safe_join()`` to handle a trailing slash in an intermediate path. (`#341`_) -* Fix regression in ``gs.GSBotoStorage`` got an unespected kwarg. +* Fix regression in ``gs.GSBotoStorage`` getting an unexpected kwarg. (`#342`_) .. _#341: https://github.com/jschneier/django-storages/pull/341 From 4501f992ae5ac12d094a153c01ef37e5c3c04a1f Mon Sep 17 00:00:00 2001 From: mabuaisha Date: Tue, 25 Jul 2017 18:24:17 +0300 Subject: [PATCH 161/174] Fix S3Boto3Storage access when AWS_PRELOAD_METADATA = True --- CHANGELOG.rst | 2 ++ storages/backends/s3boto3.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index ebad09ea6..688f338fb 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,9 +6,11 @@ django-storages change log * Files uploaded with ``GoogleCloudStorage`` will now set their appropriate mimetype (`#320`_) * Fix ``DropBoxStorage.url`` to work. (`#357`_) +* Fix ``S3Boto3Storage`` when ``AWS_PRELOAD_METADATA = True`` (`#366`_) .. _#320: https://github.com/jschneier/django-storages/pull/320 .. _#357: https://github.com/jschneier/django-storages/pull/357 +.. _#366: https://github.com/jschneier/django-storages/pull/366 1.6.3 (2017-06-23) ****************** diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 741bf3333..97cd6294d 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -487,7 +487,7 @@ def size(self, name): if self.entries: entry = self.entries.get(name) if entry: - return entry.content_length + return entry.size return 0 return self.bucket.Object(self._encode_name(name)).content_length From c73680e7d7f906b841f77dd1aa4f5c3cfa8fb3a2 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 26 Jul 2017 17:49:12 -0400 Subject: [PATCH 162/174] Fix for file-like objects without names (#368) --- storages/backends/s3boto3.py | 12 ++++++++++++ tests/test_s3boto3.py | 4 ++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 97cd6294d..1547a47de 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -430,6 +430,18 @@ def _save(self, name, content): if self.preload_metadata: self._entries[encoded_name] = obj + # If both `name` and `content.name` are empty or None, your request + # can be rejected with `XAmzContentSHA256Mismatch` error, because in + # `django.core.files.storage.Storage.save` method your file-like object + # will be wrapped in `django.core.files.File` if no `chunks` method + # provided. 
`File.__bool__` method is Django-specific and depends on + # file name, for this reason`botocore.handlers.calculate_md5` can fail + # even if wrapped file-like object exists. To avoid Django-specific + # logic, pass internal file-like object if `content` is `File` + # class instance. + if isinstance(content, File): + content = content.file + self._save_content(obj, content, parameters=parameters) # Note: In boto3, after a put, last_modified is automatically reloaded # the next time it is accessed; no need to specifically reload it. diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index 7b239900f..adfed69b5 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -80,7 +80,7 @@ def test_storage_save(self): obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( - content, + content.file, ExtraArgs={ 'ContentType': 'text/plain', 'ACL': self.storage.default_acl, @@ -96,7 +96,7 @@ def test_storage_save_gzipped(self): self.storage.save(name, content) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( - content, + content.file, ExtraArgs={ 'ContentType': 'application/octet-stream', 'ContentEncoding': 'gzip', From 829f8312c4ea401d542fcf8cb00018064a7dfd72 Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Wed, 26 Jul 2017 23:49:45 +0200 Subject: [PATCH 163/174] Update CHANGELOG for #195 #368 --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 688f338fb..5aa6d647b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -7,10 +7,13 @@ django-storages change log * Files uploaded with ``GoogleCloudStorage`` will now set their appropriate mimetype (`#320`_) * Fix ``DropBoxStorage.url`` to work. (`#357`_) * Fix ``S3Boto3Storage`` when ``AWS_PRELOAD_METADATA = True`` (`#366`_) +* Fix ``S3Boto3Storage`` uploading file-like objects without names (`#195`_, `#368`_) .. _#320: https://github.com/jschneier/django-storages/pull/320 .. _#357: https://github.com/jschneier/django-storages/pull/357 .. _#366: https://github.com/jschneier/django-storages/pull/366 +.. _#195: https://github.com/jschneier/django-storages/pull/195 +.. _#368: https://github.com/jschneier/django-storages/pull/368 1.6.3 (2017-06-23) ****************** From 0a2c5589d2ce072fff3e9bdafe3c9a85f1751fcc Mon Sep 17 00:00:00 2001 From: Minseok Choi Date: Thu, 27 Jul 2017 11:59:25 +0900 Subject: [PATCH 164/174] Add import of default_storage to amazon-S3.rst --- docs/backends/amazon-S3.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/backends/amazon-S3.rst b/docs/backends/amazon-S3.rst index a56ab7949..b25e84054 100644 --- a/docs/backends/amazon-S3.rst +++ b/docs/backends/amazon-S3.rst @@ -150,6 +150,7 @@ Storage Standard file access options are available, and work as expected:: + >>> from django.core.files.storage import default_storage >>> default_storage.exists('storage_test') False >>> file = default_storage.open('storage_test', 'w') From 142e822bd6d6a84840a822c3ccf6a0bd9a09da8c Mon Sep 17 00:00:00 2001 From: Alex Tomkins Date: Thu, 27 Jul 2017 20:16:20 +0100 Subject: [PATCH 165/174] Create a session/connect per thread for s3boto3 (#358) Documentation for boto3 recommends a session per thread - https://boto3.readthedocs.io/en/latest/guide/resources.html#multithreading-multiprocessing As the storage class is (usually) only instantiated once per process, we need to set a thread local for each thread/connection used. 
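A condensed sketch of the pattern this change applies (simplified; the
real connection property also passes credentials, region and config
pulled from settings, and ThreadSafeStorage is just a hypothetical
stand-in for S3Boto3Storage):

    import threading

    import boto3

    class ThreadSafeStorage(object):
        def __init__(self):
            # One thread-local namespace shared by all threads; each
            # thread sees only its own 'connection' attribute on it.
            self._connections = threading.local()

        @property
        def connection(self):
            connection = getattr(self._connections, 'connection', None)
            if connection is None:
                # Build a fresh session per thread instead of sharing
                # one session (or the default module-level session)
                # across threads.
                session = boto3.session.Session()
                self._connections.connection = session.resource(
                    's3', region_name='us-east-1')
            return self._connections.connection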
Fixes #268 --- storages/backends/s3boto3.py | 10 ++++++---- tests/test_s3boto3.py | 23 ++++++++++++++++++++--- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py index 1547a47de..6c18ced60 100644 --- a/storages/backends/s3boto3.py +++ b/storages/backends/s3boto3.py @@ -1,6 +1,7 @@ import mimetypes import os import posixpath +import threading from gzip import GzipFile from tempfile import SpooledTemporaryFile @@ -236,7 +237,7 @@ def __init__(self, acl=None, bucket=None, **settings): self._entries = {} self._bucket = None - self._connection = None + self._connections = threading.local() self.security_token = None if not self.access_key and not self.secret_key: @@ -253,9 +254,10 @@ def connection(self): # Note that proxies are handled by environment variables that the underlying # urllib/requests libraries read. See https://github.com/boto/boto3/issues/338 # and http://docs.python-requests.org/en/latest/user/advanced/#proxies - if self._connection is None: + connection = getattr(self._connections, 'connection', None) + if connection is None: session = boto3.session.Session() - self._connection = session.resource( + self._connections.connection = session.resource( 's3', aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key, @@ -265,7 +267,7 @@ def connection(self): endpoint_url=self.endpoint_url, config=self.config ) - return self._connection + return self._connections.connection @property def bucket(self): diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py index adfed69b5..d0f0b0593 100644 --- a/tests/test_s3boto3.py +++ b/tests/test_s3boto3.py @@ -2,7 +2,9 @@ from __future__ import unicode_literals import gzip +import threading from datetime import datetime +from unittest import skipIf from botocore.exceptions import ClientError from django.conf import settings @@ -22,7 +24,7 @@ class S3Boto3TestCase(TestCase): def setUp(self): self.storage = s3boto3.S3Boto3Storage() - self.storage._connection = mock.MagicMock() + self.storage._connections.connection = mock.MagicMock() class S3Boto3StorageTests(S3Boto3TestCase): @@ -174,8 +176,8 @@ def test_storage_open_write(self): def test_auto_creating_bucket(self): self.storage.auto_create_bucket = True Bucket = mock.MagicMock() - self.storage._connection.Bucket.return_value = Bucket - self.storage._connection.meta.client.meta.region_name = 'sa-east-1' + self.storage._connections.connection.Bucket.return_value = Bucket + self.storage._connections.connection.meta.client.meta.region_name = 'sa-east-1' Bucket.meta.client.head_bucket.side_effect = ClientError({'Error': {}, 'ResponseMetadata': {'HTTPStatusCode': 404}}, @@ -342,3 +344,18 @@ def test_strip_signing_parameters(self): '%s?X-Amz-Date=12345678&X-Amz-Signature=Signature' % expected), expected) self.assertEqual(self.storage._strip_signing_parameters( '%s?expires=12345678&signature=Signature' % expected), expected) + + @skipIf(threading is None, 'Test requires threading') + def test_connection_threading(self): + connections = [] + + def thread_storage_connection(): + connections.append(self.storage.connection) + + for x in range(2): + t = threading.Thread(target=thread_storage_connection) + t.start() + t.join() + + # Connection for each thread needs to be unique + self.assertIsNot(connections[0], connections[1]) From 6f8ef37df6d7ff34b3f19e5b7dcd4743fea01b7e Mon Sep 17 00:00:00 2001 From: Josh Schneier Date: Thu, 27 Jul 2017 21:18:55 +0200 Subject: [PATCH 166/174] Update CHANGELOG for #268 #358 --- 
 CHANGELOG.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 5aa6d647b..7121ac128 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -8,12 +8,16 @@ django-storages change log
 * Fix ``DropBoxStorage.url`` to work. (`#357`_)
 * Fix ``S3Boto3Storage`` when ``AWS_PRELOAD_METADATA = True`` (`#366`_)
 * Fix ``S3Boto3Storage`` uploading file-like objects without names (`#195`_, `#368`_)
+* ``S3Boto3Storage`` is now threadsafe - a separate session is created on a
+  per-thread basis (`#268`_, `#358`_)
 
 .. _#320: https://github.com/jschneier/django-storages/pull/320
 .. _#357: https://github.com/jschneier/django-storages/pull/357
 .. _#366: https://github.com/jschneier/django-storages/pull/366
 .. _#195: https://github.com/jschneier/django-storages/pull/195
 .. _#368: https://github.com/jschneier/django-storages/pull/368
+.. _#268: https://github.com/jschneier/django-storages/issues/268
+.. _#358: https://github.com/jschneier/django-storages/pull/358
 
 1.6.3 (2017-06-23)
 ******************

From 523ab59696061cb1f940cb4fe75d58e68426fe45 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Thu, 27 Jul 2017 23:48:41 +0200
Subject: [PATCH 167/174] Refs #366 -- Duck-type when returning size

---
 storages/backends/s3boto3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py
index 6c18ced60..727782f76 100644
--- a/storages/backends/s3boto3.py
+++ b/storages/backends/s3boto3.py
@@ -501,7 +501,7 @@ def size(self, name):
         if self.entries:
             entry = self.entries.get(name)
             if entry:
-                return entry.size
+                return entry.size if hasattr(entry, 'size') else entry.content_length
             return 0
         return self.bucket.Object(self._encode_name(name)).content_length

From 8eaa144b6fa5ffb88c1acbedf24ab9829cf4e703 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Thu, 27 Jul 2017 23:56:33 +0200
Subject: [PATCH 168/174] Style and grammar updates

---
 CHANGELOG.rst | 20 ++++++++++----------
 README.rst    |  2 +-
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 7121ac128..cc4793bfc 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -57,7 +57,7 @@ django-storages change log
   end of the month - upgrading is recommended (`#273`_)
 * **Breaking:** The ``SFTPStorage`` backend now checks for the existence of the fallback ``~/.ssh/known_hosts``
   before attempting to load it. If you had previously been passing in a path to a non-existent file it will no longer
-  attempt to load the fallback. (`issue #118`_ `pr #325`_)
+  attempt to load the fallback. (`#118`_, `#325`_)
 * **Breaking:** The default version value for ``AWS_S3_SIGNATURE_VERSION`` is now ``'s3v4'``. No changes should
   be required (`#335`_)
 * **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage``
@@ -71,9 +71,9 @@ django-storages change log
 * Add support for Django 1.11 (`#295`_)
 * Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_)
 * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in
-  the ``s3boto3`` backend (issue `#263`_ pr `#264`_)
+  the ``s3boto3`` backend (`#263`_, `#264`_)
 * The Dropbox backend now properly translates backslashes in Windows paths into forward slashes (`e52a127`_)
-* The S3 backends now permit colons in the keys (`issue #248`_ `pr #322`_)
+* The S3 backends now permit colons in the keys (`#248`_, `#322`_)
 
 .. _#217: https://github.com/jschneier/django-storages/pull/217
 .. _#273: https://github.com/jschneier/django-storages/pull/273
@@ -89,10 +89,10 @@ django-storages change log
 .. _#264: https://github.com/jschneier/django-storages/pull/264
 .. _e52a127: https://github.com/jschneier/django-storages/commit/e52a127523fdd5be50bb670ccad566c5d527f3d1
 .. _#236: https://github.com/jschneier/django-storages/pull/236
-.. _issue #118: https://github.com/jschneier/django-storages/issues/118
-.. _pr #325: https://github.com/jschneier/django-storages/pull/325
-.. _issue #248: https://github.com/jschneier/django-storages/issues/248
-.. _pr #322: https://github.com/jschneier/django-storages/pull/322
+.. _#118: https://github.com/jschneier/django-storages/issues/118
+.. _#325: https://github.com/jschneier/django-storages/pull/325
+.. _#248: https://github.com/jschneier/django-storages/issues/248
+.. _#322: https://github.com/jschneier/django-storages/pull/322
 .. _#335: https://github.com/jschneier/django-storages/pull/335
 
 1.5.2 (2017-01-13)
 ******************
@@ -116,7 +116,7 @@ django-storages change log
 * **Breaking:** Drop support for Django 1.7 (`#185`_)
 * **Deprecation:** hashpath, image, overwrite, mogile, symlinkorcopy, database, mogile, couchdb.
-  See (`issue #202`_) to discuss maintenance going forward
+  See (`#202`_) to discuss maintenance going forward
 * Use a fixed ``mtime`` argument for ``GzipFile`` in ``S3BotoStorage`` and ``S3Boto3Storage``
   to ensure a stable output for gzipped files
 * Use ``.putfileobj`` instead of ``.put`` in ``S3Boto3Storage`` to use the transfer manager,
 * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``)
 * Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_, `#190`_)
 
-.. _issue #202: https://github.com/jschneier/django-storages/issues/202
+.. _#202: https://github.com/jschneier/django-storages/issues/202
 .. _#201: https://github.com/jschneier/django-storages/pull/201
 .. _#194: https://github.com/jschneier/django-storages/issues/194
 .. _#190: https://github.com/jschneier/django-storages/pull/190
@@ -159,7 +159,7 @@ django-storages change log
   PR `#122`_
 * Fix ``DropBoxStorage.exists`` check and add ``DropBoxStorage.url`` (`#127`_)
 * Add ``GS_HOST`` setting (with a default of ``GSConnection.DefaultHost``) to fix ``GSBotoStorage``.
-  Issue `#124`_. Fixed in `#125`_.
+  (`#124`_, `#125`_)
 
 .. _#122: https://github.com/jschneier/django-storages/pull/122
 .. _#127: https://github.com/jschneier/django-storages/pull/127

diff --git a/README.rst b/README.rst
index 3ccacdf47..214ba0c97 100644
--- a/README.rst
+++ b/README.rst
@@ -35,7 +35,7 @@ About
 django-storages is a project to provide a variety of storage backends in a single
 library.
 
 This library is usually compatible with the currently supported versions of
-Django. Check the trove classifiers in setup.py to be sure.
+Django. Check the Trove classifiers in setup.py to be sure.
 
 History
 =======

From 100c9b747b0f7adae2659476cce9822b32d29e22 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Thu, 27 Jul 2017 23:58:44 +0200
Subject: [PATCH 169/174] Release version 1.6.4

---
 CHANGELOG.rst        | 2 +-
 storages/__init__.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index cc4793bfc..5236286d5 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,7 +1,7 @@
 django-storages change log
 ==========================
 
-1.6.4 (XXXX-XX-XX)
+1.6.4 (2017-07-27)
 ******************
 
 * Files uploaded with ``GoogleCloudStorage`` will now set their appropriate mimetype (`#320`_)

diff --git a/storages/__init__.py b/storages/__init__.py
index 4574cc897..d07785c52 100644
--- a/storages/__init__.py
+++ b/storages/__init__.py
@@ -1 +1 @@
-__version__ = '1.6.3'
+__version__ = '1.6.4'

From c7d1000773204eceed94c05605c6e3ec5454929c Mon Sep 17 00:00:00 2001
From: Andrzej Winnicki
Date: Mon, 31 Jul 2017 16:40:32 +0200
Subject: [PATCH 170/174] Add failing test which tries to save the same content twice (#371)

See: https://github.com/jschneier/django-storages/issues/367
---
 tests/test_s3boto3.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/tests/test_s3boto3.py b/tests/test_s3boto3.py
index d0f0b0593..ef1a263e3 100644
--- a/tests/test_s3boto3.py
+++ b/tests/test_s3boto3.py
@@ -128,6 +128,34 @@ def test_storage_save_gzip(self):
         zfile = gzip.GzipFile(mode='rb', fileobj=content)
         self.assertEqual(zfile.read(), b"I should be gzip'd")
 
+    def test_storage_save_gzip_twice(self):
+        """
+        Test saving the same file content twice with gzip enabled.
+        """
+        # Given
+        self.storage.gzip = True
+        name = 'test_storage_save.css'
+        content = ContentFile("I should be gzip'd")
+
+        # When
+        self.storage.save(name, content)
+        self.storage.save('test_storage_save_2.css', content)
+
+        # Then
+        obj = self.storage.bucket.Object.return_value
+        obj.upload_fileobj.assert_called_with(
+            mock.ANY,
+            ExtraArgs={
+                'ContentType': 'text/css',
+                'ContentEncoding': 'gzip',
+                'ACL': self.storage.default_acl,
+            }
+        )
+        args, kwargs = obj.upload_fileobj.call_args
+        content = args[0]
+        zfile = gzip.GzipFile(mode='rb', fileobj=content)
+        self.assertEqual(zfile.read(), b"I should be gzip'd")
+
     def test_compress_content_len(self):
         """
         Test that file returned by _compress_content() is readable.
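The failing test above pins down the underlying bug: `_compress_content` reads the incoming file-like object to the end, so a second save of the same content gzips from the exhausted read position and uploads an empty payload. A minimal standalone sketch of the failure mode (standard library only; the `compress` helper is illustrative and not part of the patch series) makes this concrete; the next two patches contain the actual fix:

    import gzip
    from io import BytesIO

    def compress(content):
        # Loosely mirrors _compress_content before the fix: no content.seek(0),
        # so compression starts wherever the previous read left the cursor.
        zbuf = BytesIO()
        zfile = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
        try:
            zfile.write(content.read())
        finally:
            zfile.close()
        zbuf.seek(0)
        return zbuf

    source = BytesIO(b"I should be gzip'd")
    first = gzip.GzipFile(mode='rb', fileobj=compress(source)).read()
    second = gzip.GzipFile(mode='rb', fileobj=compress(source)).read()
    assert first == b"I should be gzip'd"
    assert second == b''  # the source was already exhausted by the first save

Rewinding with `content.seek(0)` before compressing, as the following patch does, makes both saves produce the full payload.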
From b965fb5a1b9b15da2afdb7bf7592663d2b79e3a2 Mon Sep 17 00:00:00 2001
From: mr-bo-jangles
Date: Tue, 1 Aug 2017 07:52:31 +0100
Subject: [PATCH 171/174] Ensure that we're reading the file from the start (#373)

---
 storages/backends/s3boto3.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py
index 727782f76..212179004 100644
--- a/storages/backends/s3boto3.py
+++ b/storages/backends/s3boto3.py
@@ -387,6 +387,7 @@ def _decode_name(self, name):
 
     def _compress_content(self, content):
         """Gzip a given string content."""
+        content.seek(0)
         zbuf = BytesIO()
         zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
         try:

From 06cf8209bf87524c51988ab8e72c8cf97eaa5b89 Mon Sep 17 00:00:00 2001
From: R&D
Date: Tue, 1 Aug 2017 02:52:57 -0400
Subject: [PATCH 172/174] Fix the mtime when gzipping in S3Boto3Storage (#374)

---
 storages/backends/s3boto3.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/storages/backends/s3boto3.py b/storages/backends/s3boto3.py
index 212179004..9caae4d0a 100644
--- a/storages/backends/s3boto3.py
+++ b/storages/backends/s3boto3.py
@@ -389,7 +389,11 @@ def _compress_content(self, content):
         """Gzip a given string content."""
         content.seek(0)
         zbuf = BytesIO()
-        zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
+        # The GZIP header has a modification time attribute (see http://www.zlib.org/rfc-gzip.html).
+        # This means the compressed output changes on every run, even when the underlying
+        # contents have not changed. For S3 this defeats detection of changes via MD5 sums
+        # on gzipped files, so fix the mtime at 0.0 at compression time to avoid the problem.
+        zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0.0)
         try:
             zfile.write(force_bytes(content.read()))
         finally:

From dc3902dc226bd69b4698b95c1bf6e6aae9a5fa50 Mon Sep 17 00:00:00 2001
From: Josh Schneier
Date: Tue, 1 Aug 2017 09:11:57 +0200
Subject: [PATCH 173/174] Update CHANGELOG and release 1.6.5

---
 CHANGELOG.rst        | 12 ++++++++++++
 storages/__init__.py |  2 +-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 5236286d5..195477e41 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,18 @@
 django-storages change log
 ==========================
 
+1.6.5 (2017-08-01)
+******************
+
+* Fix Django 1.11 regression with gzipped content being saved twice,
+  resulting in empty files (`#367`_, `#371`_, `#373`_)
+* Fix the ``mtime`` when gzipping content on ``S3Boto3Storage`` (`#374`_)
+
+.. _#367: https://github.com/jschneier/django-storages/issues/367
+.. _#371: https://github.com/jschneier/django-storages/pull/371
+.. _#373: https://github.com/jschneier/django-storages/pull/373
+.. _#374: https://github.com/jschneier/django-storages/pull/374
+
 1.6.4 (2017-07-27)
 ******************

diff --git a/storages/__init__.py b/storages/__init__.py
index d07785c52..f3df7f04b 100644
--- a/storages/__init__.py
+++ b/storages/__init__.py
@@ -1 +1 @@
-__version__ = '1.6.4'
+__version__ = '1.6.5'

From adefe324be22843148b03a2068d2922eaae1a48b Mon Sep 17 00:00:00 2001
From: Max Malysh
Date: Mon, 14 Aug 2017 06:32:20 +0300
Subject: [PATCH 174/174] Dropbox: large file support and a regression fix (#379)

* Fix .save method of the Dropbox backend (#378)

* Dropbox large file support (#301)

* Make linter happy
---
 AUTHORS                      |  1 +
 storages/backends/dropbox.py | 31 ++++++++++++++++++++++++++++++-
 tests/test_dropbox.py        | 19 +++++++++++++++++--
 3 files changed, 48 insertions(+), 3 deletions(-)

diff --git a/AUTHORS b/AUTHORS
index 602fdae67..25db7d015 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -31,6 +31,7 @@ By order of apparition, thanks:
  * Jody McIntyre (Google Cloud Storage native support)
  * Stanislav Kaledin (Bug fixes in SFTPStorage)
  * Filip Vavera (Google Cloud MIME types support)
+ * Max Malysh (Dropbox large file support)
 
 Extra thanks to Marty for adding this in Django, you can buy his very
 interesting book (Pro Django).

diff --git a/storages/backends/dropbox.py b/storages/backends/dropbox.py
index e520b3886..bae6deadb 100644
--- a/storages/backends/dropbox.py
+++ b/storages/backends/dropbox.py
@@ -21,6 +21,7 @@
 from django.utils.deconstruct import deconstructible
 from dropbox import Dropbox
 from dropbox.exceptions import ApiError
+from dropbox.files import CommitInfo, UploadSessionCursor
 
 from storages.utils import setting
 
@@ -50,6 +51,8 @@ def file(self):
 class DropBoxStorage(Storage):
     """DropBox Storage class for Django pluggable storage system."""
 
+    CHUNK_SIZE = 4 * 1024 * 1024
+
     def __init__(self, oauth2_access_token=None, root_path=None):
         oauth2_access_token = oauth2_access_token or setting('DROPBOX_OAUTH2_TOKEN')
         self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
@@ -108,5 +111,31 @@ def _open(self, name, mode='rb'):
         return remote_file
 
     def _save(self, name, content):
-        self.client.files_upload(content, self._full_path(name))
+        content.open()
+        if content.size <= self.CHUNK_SIZE:
+            self.client.files_upload(content.read(), self._full_path(name))
+        else:
+            self._chunked_upload(content, self._full_path(name))
+        content.close()
         return name
+
+    def _chunked_upload(self, content, dest_path):
+        upload_session = self.client.files_upload_session_start(
+            content.read(self.CHUNK_SIZE)
+        )
+        cursor = UploadSessionCursor(
+            session_id=upload_session.session_id,
+            offset=content.tell()
+        )
+        commit = CommitInfo(path=dest_path)
+
+        while content.tell() < content.size:
+            if (content.size - content.tell()) <= self.CHUNK_SIZE:
+                self.client.files_upload_session_finish(
+                    content.read(self.CHUNK_SIZE), cursor, commit
+                )
+            else:
+                self.client.files_upload_session_append_v2(
+                    content.read(self.CHUNK_SIZE), cursor
+                )
+                cursor.offset = content.tell()

diff --git a/tests/test_dropbox.py b/tests/test_dropbox.py
index 342a7d042..58d503628 100644
--- a/tests/test_dropbox.py
+++ b/tests/test_dropbox.py
@@ -5,6 +5,7 @@
 )
 from django.core.files.base import ContentFile, File
 from django.test import TestCase
+from django.utils.six import BytesIO
 
 from storages.backends import dropbox
 
@@ -118,8 +119,22 @@ def test_open(self, *args):
 
     @mock.patch('dropbox.Dropbox.files_upload',
                 return_value='foo')
-    def test_save(self, *args):
-        self.storage._save('foo', b'bar')
+    def test_save(self, files_upload, *args):
+        self.storage._save('foo', File(BytesIO(b'bar'), 'foo'))
+        self.assertTrue(files_upload.called)
+
+    @mock.patch('dropbox.Dropbox.files_upload')
+    @mock.patch('dropbox.Dropbox.files_upload_session_finish')
+    @mock.patch('dropbox.Dropbox.files_upload_session_append_v2')
+    @mock.patch('dropbox.Dropbox.files_upload_session_start',
+                return_value=mock.MagicMock(session_id='foo'))
+    def test_chunked_upload(self, start, append, finish, upload):
+        large_file = File(BytesIO(b'bar' * self.storage.CHUNK_SIZE), 'foo')
+        self.storage._save('foo', large_file)
+        self.assertTrue(start.called)
+        self.assertTrue(append.called)
+        self.assertTrue(finish.called)
+        self.assertFalse(upload.called)
 
     @mock.patch('dropbox.Dropbox.files_get_temporary_link',
                 return_value=FILE_MEDIA_FIXTURE)
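With the patch above, `DropBoxStorage` picks the upload path by size: a single `files_upload` call for content at or under `CHUNK_SIZE` (4 MB), or a Dropbox upload session (start, `files_upload_session_append_v2` per chunk, then `files_upload_session_finish` with a `CommitInfo`) for anything larger. A usage sketch follows; the token and file names are illustrative placeholders, and a valid Dropbox OAuth2 token is required for the calls to actually succeed:

    from django.core.files.base import ContentFile
    from storages.backends.dropbox import DropBoxStorage

    # Illustrative token/path values -- not part of the patch.
    storage = DropBoxStorage(oauth2_access_token='<oauth2 token>', root_path='/')

    # At or under CHUNK_SIZE: one files_upload call.
    storage.save('small.bin', ContentFile(b'x' * 1024))

    # Over CHUNK_SIZE: upload session start / append_v2 / finish.
    storage.save('large.bin', ContentFile(b'x' * (5 * 1024 * 1024)))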