diff --git a/storages/backends/gs.py b/storages/backends/gs.py
index 907ab63..cbda3bc 100644
--- a/storages/backends/gs.py
+++ b/storages/backends/gs.py
@@ -1,3 +1,8 @@
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO  # noqa
+
 from django.core.exceptions import ImproperlyConfigured
 
 from storages.backends.s3boto import S3BotoStorage, S3BotoStorageFile, setting
@@ -12,7 +17,20 @@
 
 
 class GSBotoStorageFile(S3BotoStorageFile):
-    buffer_size = setting('GS_FILE_BUFFER_SIZE', 5242880)
+
+    def write(self, content):
+        if 'w' not in self._mode:
+            raise AttributeError("File was not opened in write mode.")
+        self.file = StringIO(content)
+        self._is_dirty = True
+
+    def close(self):
+        if self._is_dirty:
+            provider = self.key.bucket.connection.provider
+            upload_headers = {provider.acl_header: self._storage.default_acl}
+            upload_headers.update(self._storage.headers)
+            self._storage._save_content(self.key, self.file, upload_headers)
+        self.key.close()
 
 
 class GSBotoStorage(S3BotoStorage):
@@ -21,17 +39,20 @@ class GSBotoStorage(S3BotoStorage):
     file_class = GSBotoStorageFile
     key_class = GSKey
 
+    access_key_names = ['GS_ACCESS_KEY_ID']
+    secret_key_names = ['GS_SECRET_ACCESS_KEY']
+
     access_key = setting('GS_ACCESS_KEY_ID')
     secret_key = setting('GS_SECRET_ACCESS_KEY')
     file_overwrite = setting('GS_FILE_OVERWRITE', True)
     headers = setting('GS_HEADERS', {})
-    storage_bucket_name = setting('GS_BUCKET_NAME', None)
+    bucket_name = setting('GS_BUCKET_NAME', None)
     auto_create_bucket = setting('GS_AUTO_CREATE_BUCKET', False)
     default_acl = setting('GS_DEFAULT_ACL', 'public-read')
     bucket_acl = setting('GS_BUCKET_ACL', default_acl)
     querystring_auth = setting('GS_QUERYSTRING_AUTH', True)
     querystring_expire = setting('GS_QUERYSTRING_EXPIRE', 3600)
-    reduced_redundancy = setting('GS_REDUCED_REDUNDANCY', False)
+    durable_reduced_availability = setting('GS_DURABLE_REDUCED_AVAILABILITY', False)
     location = setting('GS_LOCATION', '')
     custom_domain = setting('GS_CUSTOM_DOMAIN')
     calling_format = setting('GS_CALLING_FORMAT', SubdomainCallingFormat())
@@ -45,3 +66,32 @@ class GSBotoStorage(S3BotoStorage):
         'application/x-javascript',
     ))
     url_protocol = setting('GS_URL_PROTOCOL', 'http:')
+
+    def _save_content(self, key, content, headers):
+        # only pass backwards incompatible arguments if they vary from the default
+        options = {}
+        if self.encryption:
+            options['encrypt_key'] = self.encryption
+        key.set_contents_from_file(content, headers=headers,
+                                   policy=self.default_acl,
+                                   rewind=True, **options)
+
+    def _get_or_create_bucket(self, name):
+        """
+        Retrieves a bucket if it exists, otherwise creates it.
+        """
+        if self.durable_reduced_availability:
+            storage_class = 'DURABLE_REDUCED_AVAILABILITY'
+        else:
+            storage_class = 'STANDARD'
+        try:
+            return self.connection.get_bucket(name, validate=self.auto_create_bucket)
+        except self.connection_response_error:
+            if self.auto_create_bucket:
+                bucket = self.connection.create_bucket(name, storage_class=storage_class)
+                bucket.set_acl(self.bucket_acl)
+                return bucket
+            raise ImproperlyConfigured("Bucket %s does not exist. Buckets "
+                                       "can be automatically created by "
+                                       "setting GS_AUTO_CREATE_BUCKET."
+                                       % name)
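For context, a minimal sketch of the Django settings that would exercise the options touched by the gs.py changes above. The bucket name and key values are placeholders, and the snippet is illustrative rather than part of the patch:

```python
# Hypothetical settings.py excerpt -- values are placeholders.
DEFAULT_FILE_STORAGE = 'storages.backends.gs.GSBotoStorage'

GS_ACCESS_KEY_ID = 'GOOG0123456789'   # can now also be supplied via the
GS_SECRET_ACCESS_KEY = 'secret'       # GS_* environment variables
GS_BUCKET_NAME = 'example-bucket'     # read into the renamed `bucket_name` attribute

# When the bucket is missing and auto-creation is enabled, it is created
# with the DURABLE_REDUCED_AVAILABILITY storage class instead of STANDARD.
GS_AUTO_CREATE_BUCKET = True
GS_DURABLE_REDUCED_AVAILABILITY = True  # replaces GS_REDUCED_REDUNDANCY
```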
diff --git a/storages/backends/s3boto.py b/storages/backends/s3boto.py
index 845a0b1..dc5f6e9 100644
--- a/storages/backends/s3boto.py
+++ b/storages/backends/s3boto.py
@@ -192,6 +192,10 @@ class S3BotoStorage(Storage):
     file_class = S3BotoStorageFile
     key_class = S3Key
 
+    # used for looking up the access and secret key from env vars
+    access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID']
+    secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY']
+
     access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID'))
     secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY'))
     file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True)
@@ -279,19 +283,14 @@ def _get_access_keys(self):
         are provided to the class in the constructor or in the settings
         then get them from the environment variables.
         """
-        access_key = self.access_key
-        secret_key = self.secret_key
-
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            # TODO: this seems to be broken
-            access_key = os.environ.get(self.access_key)
-            secret_key = os.environ.get(self.secret_key)
-
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-
-        return None, None
+        def lookup_env(names):
+            for name in names:
+                value = os.environ.get(name)
+                if value:
+                    return value
+        access_key = self.access_key or lookup_env(self.access_key_names)
+        secret_key = self.secret_key or lookup_env(self.secret_key_names)
+        return access_key, secret_key
 
     def _get_or_create_bucket(self, name):
         """
@@ -349,7 +348,7 @@ def _compress_content(self, content):
 
     def _open(self, name, mode='rb'):
         name = self._normalize_name(self._clean_name(name))
-        f = S3BotoStorageFile(name, mode, self)
+        f = self.file_class(name, mode, self)
         if not f.key:
            raise IOError('File does not exist: %s' % name)
         return f
@@ -377,6 +376,10 @@ def _save(self, name, content):
             self._entries[encoded_name] = key
 
         key.set_metadata('Content-Type', content_type)
+        self._save_content(key, content, headers=headers)
+        return cleaned_name
+
+    def _save_content(self, key, content, headers):
         # only pass backwards incompatible arguments if they vary from the default
         kwargs = {}
         if self.encryption:
@@ -385,7 +388,6 @@
                                    policy=self.default_acl,
                                    reduced_redundancy=self.reduced_redundancy,
                                    rewind=True, **kwargs)
-        return cleaned_name
 
     def delete(self, name):
         name = self._normalize_name(self._clean_name(name))
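To make the refactored credential lookup in `_get_access_keys()` concrete, here is a small standalone sketch (not part of the patch) of the resolution order it implements: an explicitly configured key short-circuits the lookup; otherwise the environment variable names are tried in order, so the service-specific `AWS_S3_*` names shadow the generic `AWS_*` ones.

```python
import os

def lookup_env(names):
    # Mirrors the helper added above: return the first non-empty
    # environment variable, or None if none of the names are set.
    for name in names:
        value = os.environ.get(name)
        if value:
            return value

os.environ['AWS_ACCESS_KEY_ID'] = 'generic-key'
os.environ['AWS_S3_ACCESS_KEY_ID'] = 's3-specific-key'

# The S3-specific name is listed first, so it wins over the generic one.
print(lookup_env(['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID']))  # s3-specific-key

# An explicitly configured key bypasses the environment entirely,
# exactly like `self.access_key or lookup_env(...)` in the patch.
configured = 'explicit-key'  # e.g. set via AWS_S3_ACCESS_KEY_ID in settings
access_key = configured or lookup_env(['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'])
print(access_key)  # explicit-key
```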