Skip to content

Commit

Permalink
Fixed key handling in S3Boto and Google Storage backend and made sure the Google Storage backend uses the correct file class and boto API for durable reduced availability.
Browse files Browse the repository at this point in the history
  • Loading branch information
jezdez committed Jan 26, 2013
1 parent 02048e5 commit 9b3d509
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 17 deletions.
36 changes: 34 additions & 2 deletions storages/backends/gs.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,17 +39,20 @@ class GSBotoStorage(S3BotoStorage):
file_class = GSBotoStorageFile
key_class = GSKey

access_key_names = ['GS_ACCESS_KEY_ID']
secret_key_names = ['GS_SECRET_ACCESS_KEY']

access_key = setting('GS_ACCESS_KEY_ID')
secret_key = setting('GS_SECRET_ACCESS_KEY')
file_overwrite = setting('GS_FILE_OVERWRITE', True)
headers = setting('GS_HEADERS', {})
storage_bucket_name = setting('GS_BUCKET_NAME', None)
bucket_name = setting('GS_BUCKET_NAME', None)
auto_create_bucket = setting('GS_AUTO_CREATE_BUCKET', False)
default_acl = setting('GS_DEFAULT_ACL', 'public-read')
bucket_acl = setting('GS_BUCKET_ACL', default_acl)
querystring_auth = setting('GS_QUERYSTRING_AUTH', True)
querystring_expire = setting('GS_QUERYSTRING_EXPIRE', 3600)
reduced_redundancy = setting('GS_REDUCED_REDUNDANCY', False)
durable_reduced_availability = setting('GS_DURABLE_REDUCED_AVAILABILITY', False)
location = setting('GS_LOCATION', '')
custom_domain = setting('GS_CUSTOM_DOMAIN')
calling_format = setting('GS_CALLING_FORMAT', SubdomainCallingFormat())
Expand All @@ -63,3 +66,32 @@ class GSBotoStorage(S3BotoStorage):
'application/x-javascript',
))
url_protocol = setting('GS_URL_PROTOCOL', 'http:')

def _save_content(self, key, content, headers):
# only pass backwards incompatible arguments if they vary from the default
options = {}
if self.encryption:
options['encrypt_key'] = self.encryption
key.set_contents_from_file(content, headers=headers,
policy=self.default_acl,
rewind=True, **options)

def _get_or_create_bucket(self, name):
"""
Retrieves a bucket if it exists, otherwise creates it.
"""
if self.durable_reduced_availability:
storage_class = 'DURABLE_REDUCED_AVAILABILITY'
else:
storage_class = 'STANDARD'
try:
return self.connection.get_bucket(name,
validate=self.auto_create_bucket)
except self.connection_response_error:
if self.auto_create_bucket:
bucket = self.connection.create_bucket(name, storage_class=storage_class)
bucket.set_acl(self.bucket_acl)
return bucket
raise ImproperlyConfigured("Bucket %s does not exist. Buckets "
"can be automatically created by "
"setting appropriate setting." % name)
32 changes: 17 additions & 15 deletions storages/backends/s3boto.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,10 @@ class S3BotoStorage(Storage):
file_class = S3BotoStorageFile
key_class = S3Key

# used for looking up the access and secret key from env vars
access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID']
secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY']

access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID'))
secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY'))
file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True)
Expand Down Expand Up @@ -279,19 +283,14 @@ def _get_access_keys(self):
are provided to the class in the constructor or in the
settings then get them from the environment variables.
"""
access_key = self.access_key
secret_key = self.secret_key

if (access_key or secret_key) and (not access_key or not secret_key):
# TODO: this seems to be broken
access_key = os.environ.get(self.access_key)
secret_key = os.environ.get(self.secret_key)

if access_key and secret_key:
# Both were provided, so use them
return access_key, secret_key

return None, None
def lookup_env(names):
for name in names:
value = os.environ.get(name)
if value:
return value
access_key = self.access_key or lookup_env(self.access_key_names)
secret_key = self.secret_key or lookup_env(self.secret_key_names)
return access_key, secret_key

def _get_or_create_bucket(self, name):
"""
Expand Down Expand Up @@ -348,7 +347,7 @@ def _compress_content(self, content):

def _open(self, name, mode='rb'):
name = self._normalize_name(self._clean_name(name))
f = S3BotoStorageFile(name, mode, self)
f = self.file_class(name, mode, self)
if not f.key:
raise IOError('File does not exist: %s' % name)
return f
Expand Down Expand Up @@ -376,6 +375,10 @@ def _save(self, name, content):
self._entries[encoded_name] = key

key.set_metadata('Content-Type', content_type)
self._save_content(key, content, headers=headers)
return cleaned_name

def _save_content(self, key, content, headers):
# only pass backwards incompatible arguments if they vary from the default
kwargs = {}
if self.encryption:
Expand All @@ -384,7 +387,6 @@ def _save(self, name, content):
policy=self.default_acl,
reduced_redundancy=self.reduced_redundancy,
rewind=True, **kwargs)
return cleaned_name

def delete(self, name):
name = self._normalize_name(self._clean_name(name))
Expand Down

0 comments on commit 9b3d509

Please sign in to comment.