view core/s3.py @ 943:cf9918328c64
Haystack tweaks for Django 1.7.7.
I had to upgrade to Haystack 2.3.1 to get it to work with Django
1.7.7. I also had to update the Xapian backend. But I ran into
problems.
On my laptop at least (Ubuntu 14.04), Xapian raises errors at indexing
time when a search term is longer than roughly 245 characters. So I created
a custom field that simply omits terms longer than 64 characters and
used this field everywhere I previously used a CharField.
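
The custom field itself is not part of this page; a minimal sketch of the idea, assuming Haystack 2.x's `indexes.CharField` (the class and constant names below are made up), might look like this:

```python
# Hypothetical sketch only -- the actual field from this changeset is not
# shown on this page. Assumes Haystack 2.x.
from haystack import indexes

MAX_TERM_LENGTH = 64   # drop terms longer than this before indexing


class MaxTermSizeCharField(indexes.CharField):
    """CharField that silently omits terms longer than MAX_TERM_LENGTH,
    working around Xapian errors on very long terms."""

    def prepare(self, obj):
        text = super(MaxTermSizeCharField, self).prepare(obj)
        if text is None:
            return text
        terms = (t for t in text.split() if len(t) <= MAX_TERM_LENGTH)
        return u' '.join(terms)
```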
Secondly, the custom search form was now broken. Something changed in
the Xapian backend and exact searches stopped working. Fortunately,
auto_query (which I was using originally and which broke during an earlier
upgrade) started working again, so I cut the search form back over to
using auto_query. I kept the form the same (3 fields) because I didn't
want to change it, and I think it's better that way.
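
The 3-field form itself isn't shown here either; as a rough sketch of what cutting a form back over to auto_query can look like with Haystack 2.x's stock SearchForm (the class name is a placeholder and only a single `q` field is shown):

```python
# Hypothetical sketch -- not the actual sg101 form. Assumes Haystack 2.x,
# which provides SearchForm, self.searchqueryset and auto_query().
from haystack.forms import SearchForm


class ForumSearchForm(SearchForm):
    def search(self):
        if not self.is_valid() or not self.cleaned_data.get('q'):
            return self.no_query_found()
        # Let Haystack parse the raw query string (quoted phrases, etc.)
        # instead of building exact-match filters by hand.
        return self.searchqueryset.auto_query(self.cleaned_data['q'])
```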
| author   | Brian Neal <bgneal@gmail.com>   |
|----------|---------------------------------|
| date     | Wed, 13 May 2015 20:25:07 -0500 |
| parents  | bf5340705d0c                    |
| children | e4f2d6a4b401                    |
line source
"""s3.py This module provides Amazon S3 convenience wrappers. """ from boto.s3.connection import S3Connection from boto.s3.key import Key class S3Bucket(object): """This class abstracts an Amazon S3 bucket. """ def __init__(self, access_key, secret_key, base_url, bucket_name): self.conn = S3Connection(access_key, secret_key) self.bucket = self.conn.get_bucket(bucket_name, validate=False) self.base_url = base_url if not base_url.endswith('/'): self.base_url += '/' self.name = bucket_name def upload_from_file(self, key_name, fp, metadata=None, public=True): """Uploads data from the file object fp to a new key named key_name. metadata, if not None, must be a dict of metadata key / value pairs which will be added to the key. If public is True, the key will be made public after the upload. Returns the URL to the uploaded file. """ key = self._make_key(key_name, metadata) key.set_contents_from_file(fp) if public: key.make_public() return '{}{}/{}'.format(self.base_url, self.name, key_name) def upload_from_filename(self, key_name, filename, metadata=None, public=True): """Uploads data from the file named by filename to a new key named key_name. metadata, if not None, must be a dict of metadata key / value pairs which will be added to the key. If public is True, the key will be made public after the upload. Returns the URL to the uploaded file. """ key = self._make_key(key_name, metadata) key.set_contents_from_filename(filename) if public: key.make_public() return '{}{}/{}'.format(self.base_url, self.name, key_name) def upload_from_string(self, key_name, content, metadata=None, public=True): """Creates a new key with the given key_name, and uploads the string content to it. metadata, if not None, must be a dict of metadata key / value pairs which will be added to the key. If public is True, the key will be made public after the upload. Returns the URL to the uploaded file. """ key = self._make_key(key_name, metadata) key.set_contents_from_string(content) if public: key.make_public() return '{}{}/{}'.format(self.base_url, self.name, key_name) def delete_keys(self, key_urls): """Deletes a set of keys, specified as a list of URLs. The URLs could have been returned by one or more of the upload_* methods. Returns the number of keys that were successfully deleted. """ if len(key_urls) == 0: return 0 prefix = '{}{}/'.format(self.base_url, self.name) prefix_len = len(prefix) keys = [] for url in key_urls: if url.startswith(prefix): key = url[prefix_len:] keys.append(key) response = self.bucket.delete_keys(keys, quiet=True) return len(key_urls) - len(response.errors) def _make_key(self, key_name, metadata): """Private method to create a key and optionally apply metadata to it. """ key = Key(self.bucket) key.key = key_name if metadata: for k, v in metadata.iteritems(): key.set_metadata(k, v) return key