changeset 697:67f8d49a9377

Cleaned up the code a bit. Separated the S3 stuff out into its own class. This class should maybe live in core. Still want to add some kind of context manager around the temporary file we create to ensure it gets deleted.
author Brian Neal <bgneal@gmail.com>
date Sun, 08 Sep 2013 21:02:58 -0500
parents b2a8fde3173a
children d33bedc3be74
files user_photos/forms.py user_photos/images.py user_photos/s3.py
diffstat 3 files changed, 75 insertions(+), 26 deletions(-)
--- a/user_photos/forms.py	Sun Sep 08 19:06:54 2013 -0500
+++ b/user_photos/forms.py	Sun Sep 08 21:02:58 2013 -0500
@@ -1,8 +1,10 @@
 """Forms for the user_photos application."""
 from django import forms
+from django.conf import settings
 
 from user_photos.models import Photo
 from user_photos.images import process_file
+from user_photos.s3 import S3Bucket
 
 
 class UploadForm(forms.Form):
@@ -19,7 +21,12 @@
         This function should only be called if is_valid() returns True.
 
         """
-        url, thumb_url = process_file(self.cleaned_data['image_file'], self.user)
+        bucket = S3Bucket(settings.USER_PHOTOS_ACCESS_KEY,
+                          settings.USER_PHOTOS_SECRET_KEY,
+                          settings.USER_PHOTOS_BUCKET)
+        url, thumb_url = process_file(self.cleaned_data['image_file'],
+                                      self.user,
+                                      bucket)
         photo = Photo(user=self.user, url=url, thumb_url=thumb_url)
         photo.save()
         return photo
--- a/user_photos/images.py	Sun Sep 08 19:06:54 2013 -0500
+++ b/user_photos/images.py	Sun Sep 08 21:02:58 2013 -0500
@@ -11,14 +11,12 @@
 
 from django.conf import settings
 from PIL import Image
-from boto.s3.connection import S3Connection
-from boto.s3.key import Key
 
 
 logger = logging.getLogger(__name__)
 
 
-def process_file(f, user):
+def process_file(f, user, bucket):
     """Perform processing on the given uploaded image file:
 
     * The image is resized if necessary
@@ -51,30 +49,15 @@
     image.save(thumb, format=image.format)
 
     # Upload both images to S3
-    logger.debug('Getting connection / bucket')
-    conn = S3Connection(settings.USER_PHOTOS_ACCESS_KEY,
-                        settings.USER_PHOTOS_SECRET_KEY)
-    bucket = conn.get_bucket(settings.USER_PHOTOS_BUCKET, validate=False)
-
+    logging.debug('Uploading image')
     now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
-
-    logging.debug('Uploading image')
-    k1 = Key(bucket)
-    k1.key = unique_key + ext
-    k1.set_metadata('user', user.username)
-    k1.set_metadata('date', now)
-    k1.set_contents_from_filename(filename)
+    metadata = {'user': user.username, 'date': now}
+    file_key = unique_key + ext
+    bucket.upload_from_filename(file_key, filename, metadata)
 
     logging.debug('Uploading thumbnail')
-    k2 = Key(bucket)
-    k2.key = '{}t{}'.format(unique_key, ext)
-    k2.set_metadata('user', user.username)
-    k2.set_metadata('date', now)
-    k2.set_contents_from_string(thumb.getvalue())
-
-    logging.debug('Making public')
-    k1.make_public()
-    k2.make_public()
+    thumb_key = '{}t{}'.format(unique_key, ext)
+    bucket.upload_from_string(thumb_key, thumb.getvalue())
 
     os.remove(filename)
 
@@ -83,4 +66,4 @@
     url_base = '{}/{}/'.format(settings.USER_PHOTOS_BASE_URL,
                                settings.USER_PHOTOS_BUCKET)
 
-    return (url_base + k1.key, url_base + k2.key)
+    return (url_base + file_key, url_base + thumb_key)
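
The commit message mentions wanting a context manager around the temporary file so it always gets deleted; process_file currently calls os.remove(filename) directly, which is skipped if an upload raises. A minimal sketch of that idea using only the standard library; the name temp_image_file is hypothetical, not part of this changeset:

    import contextlib
    import os

    @contextlib.contextmanager
    def temp_image_file(filename):
        # Yield the path to the temporary image and remove it on exit,
        # even if the upload raises an exception.
        try:
            yield filename
        finally:
            try:
                os.remove(filename)
            except OSError:
                pass

process_file could then wrap the upload steps in with temp_image_file(filename): and drop the explicit os.remove call, so cleanup no longer depends on the happy path.
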
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/user_photos/s3.py	Sun Sep 08 21:02:58 2013 -0500
@@ -0,0 +1,59 @@
+"""s3.py
+
+This module provides the necessary S3 upload functionality for the user_photos
+application.
+
+"""
+from boto.s3.connection import S3Connection
+from boto.s3.key import Key
+
+
+class S3Bucket(object):
+    """This class abstracts an Amazon S3 bucket.
+
+    We currently only need upload functionality.
+
+    """
+    def __init__(self, access_key, secret_key, bucket_name):
+        self.conn = S3Connection(access_key, secret_key)
+        self.bucket = self.conn.get_bucket(bucket_name, validate=False)
+
+    def upload_from_filename(self, key_name, filename, metadata=None,
+            public=True):
+        """Uploads data from the file named by filename to a new key named
+        key_name. metadata, if not None, must be a dict of metadata key / value
+        pairs which will be added to the key.
+
+        If public is True, the key will be made public after the upload.
+
+        """
+        key = self._make_key(key_name, metadata)
+        key.set_contents_from_filename(filename)
+        if public:
+            key.make_public()
+
+    def upload_from_string(self, key_name, content, metadata=None,
+            public=True):
+        """Creates a new key with the given key_name, and uploads the string
+        content to it. metadata, if not None, must be a dict of metadata key /
+        value pairs which will be added to the key.
+
+        If public is True, the key will be made public after the upload.
+
+        """
+        key = self._make_key(key_name, metadata)
+        key.set_contents_from_string(content)
+        if public:
+            key.make_public()
+
+    def _make_key(self, key_name, metadata):
+        """Private method to create a key and optionally apply metadata to
+        it.
+
+        """
+        key = Key(self.bucket)
+        key.key = key_name
+        if metadata:
+            for k, v in metadata.iteritems():
+                key.set_metadata(k, v)
+        return key
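
For reference, the new class is used the same way as in the forms.py hunk above; a minimal usage sketch with the settings names taken from the diff, and hypothetical key and file names:

    from django.conf import settings
    from user_photos.s3 import S3Bucket

    bucket = S3Bucket(settings.USER_PHOTOS_ACCESS_KEY,
                      settings.USER_PHOTOS_SECRET_KEY,
                      settings.USER_PHOTOS_BUCKET)

    # Upload a local file and an in-memory thumbnail; both are made
    # public by default (public=True).
    bucket.upload_from_filename('abc123.jpg', '/tmp/abc123.jpg',
                                metadata={'user': 'bgneal'})
    bucket.upload_from_string('abc123t.jpg', thumbnail_bytes)

Here thumbnail_bytes stands in for the in-memory thumbnail data (thumb.getvalue() in images.py).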