changeset 945:7ab180ff6f7b

Merge upstream.

author      Brian Neal <bgneal@gmail.com>
date        Wed, 13 May 2015 20:29:23 -0500
parents     cf9918328c64 (diff), de4425ad62fb (current diff)
children    87fda1bb8d38
diffstat    70 files changed, 1280 insertions(+), 722 deletions(-)
--- a/antispam/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/antispam/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,12 +1,6 @@ -from django.contrib.auth import views as auth_views - -from antispam.decorators import log_auth_failures -import antispam.receivers +default_app_config = 'antispam.apps.AntiSpamConfig' SPAM_PHRASE_KEY = "antispam.spam_phrases" BUSTED_MESSAGE = ("Your post has tripped our spam filter. Your account has " "been suspended pending a review of your post. If this was a mistake " "then we apologize; your account will be restored shortly.") - -# Install rate limiting on auth login -auth_views.login = log_auth_failures('Login')(auth_views.login)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/antispam/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,16 @@ +from django.apps import AppConfig +import django.contrib.auth.views + +from antispam.decorators import log_auth_failures + + +class AntiSpamConfig(AppConfig): + name = 'antispam' + verbose_name = 'Anti-Spam' + + def ready(self): + import antispam.receivers + + # Install rate limiting on auth login + django.contrib.auth.views.login = log_auth_failures('Login')( + django.contrib.auth.views.login)
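The two hunks above establish the Django 1.7 app-loading pattern this merge applies across the project: __init__.py now only names a configuration class via default_app_config, and import-time side effects (importing signal receivers, patching the auth login view) move into AppConfig.ready(), which Django calls once the app registry is populated. A minimal sketch of the pattern, using placeholder "example" names rather than any real app in this changeset:

    # example/__init__.py
    default_app_config = 'example.apps.ExampleConfig'

    # example/apps.py
    from django.apps import AppConfig

    class ExampleConfig(AppConfig):
        name = 'example'

        def ready(self):
            # Runs after the app registry is loaded, so importing the
            # receivers module here wires up signal handlers without the
            # old circular-import workarounds.
            import example.receivers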
--- a/antispam/receivers.py Wed May 13 20:27:17 2015 -0500 +++ b/antispam/receivers.py Wed May 13 20:29:23 2015 -0500 @@ -24,6 +24,7 @@ if user: logger.info('User logout signal: %s', user.username) + def login_failed_callback(sender, credentials, **kwargs): """Signal callback for a login failure event.""" logger.error('User login failed signal from %s: %s', sender,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/antispam/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,33 @@ +"""Tests for the antispam signal handlers.""" +import logging + +from django.contrib.auth.models import User +from django.test import TestCase + +from testfixtures import log_capture + + +class AntispamSignalRcvrTestCase(TestCase): + + def setUp(self): + self.user = User.objects.create_user('steve', 'steve@example.com', 'pwd') + + # Temporarily enable logging + self.old_disable = logging.getLogger().manager.disable + logging.disable(logging.NOTSET) + + def tearDown(self): + logging.disable(self.old_disable) + + @log_capture('auth') + def test_login_logout_callback(self, lc): + self.assertTrue(self.client.login(username='steve', password='pwd')) + self.client.logout() + lc.check(('auth', 'INFO', 'User login signal: steve'), + ('auth', 'INFO', 'User logout signal: steve')) + + @log_capture('auth') + def test_login_failed_callback(self, lc): + self.assertFalse(self.client.login(username='steve', password='xxx')) + lc.check(('auth', 'ERROR', + 'User login failed signal from django.contrib.auth: steve'))
--- a/bio/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/bio/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,1 +1,1 @@ -import signals +default_app_config = 'bio.apps.BioConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/bio/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,9 @@ +from django.apps import AppConfig + + +class BioConfig(AppConfig): + name = 'bio' + verbose_name = 'Biography' + + def ready(self): + import bio.receivers
--- a/bio/models.py Wed May 13 20:27:17 2015 -0500 +++ b/bio/models.py Wed May 13 20:29:23 2015 -0500 @@ -13,6 +13,7 @@ from core.markup import SiteMarkup import bio.flags +from bio.signals import notify_profile_content_update # These are the secondary user status enumeration values. @@ -221,6 +222,3 @@ if self.count == 1: return u"1 %s" % self.badge.name return u"%d %ss" % (self.count, self.badge.name) - -# Put down here to avoid a circular import -from bio.signals import notify_profile_content_update
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/bio/receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,95 @@ +""" +Signal handlers & signals for the bio application. + +""" +from django.db.models.signals import post_save +from django.contrib.auth.models import User + +import bio.badges +from bio.models import UserProfile + +from donations.models import Donation +from weblinks.models import Link +from downloads.models import Download +from news.models import Story +from potd.models import Photo + + +def on_user_save(sender, **kwargs): + """ + This signal handler ensures that every User has a corresonding + UserProfile. It is called after User instance is saved. It creates + a UserProfile for the User if the created argument is True. + + """ + created = kwargs['created'] + if created: + user = kwargs['instance'] + profile = UserProfile() + profile.user = user + profile.save() + + +def on_donation_save(sender, **kwargs): + """ + This function is called after a Donation is saved. + If the Donation was newly created and not anonymous, + award the user a contributor pin. + + """ + if kwargs['created']: + donation = kwargs['instance'] + if not donation.is_anonymous and donation.user: + bio.badges.award_badge(bio.badges.CONTRIBUTOR_PIN, donation.user) + + +def on_link_save(sender, **kwargs): + """ + This function is called after a Link is saved. If the Link was newly + created, award the user a link pin. + + """ + if kwargs['created']: + link = kwargs['instance'] + bio.badges.award_badge(bio.badges.LINK_PIN, link.user) + + +def on_download_save(sender, **kwargs): + """ + This function is called after a Download is saved. If the Download was + newly created, award the user a download pin. + + """ + if kwargs['created']: + download = kwargs['instance'] + bio.badges.award_badge(bio.badges.DOWNLOAD_PIN, download.user) + + +def on_story_save(sender, **kwargs): + """ + This function is called after a Story is saved. If the Story was + newly created, award the user a news pin. + + """ + if kwargs['created']: + story = kwargs['instance'] + bio.badges.award_badge(bio.badges.NEWS_PIN, story.submitter) + + +def on_photo_save(sender, **kwargs): + """ + This function is called after a Photo is saved. If the Photo was + newly created, award the user a POTD pin. + + """ + if kwargs['created']: + photo = kwargs['instance'] + bio.badges.award_badge(bio.badges.POTD_PIN, photo.user) + + +post_save.connect(on_user_save, sender=User, dispatch_uid='bio.receivers') +post_save.connect(on_donation_save, sender=Donation, dispatch_uid='bio.receivers') +post_save.connect(on_link_save, sender=Link, dispatch_uid='bio.receivers') +post_save.connect(on_download_save, sender=Download, dispatch_uid='bio.receivers') +post_save.connect(on_story_save, sender=Story, dispatch_uid='bio.receivers') +post_save.connect(on_photo_save, sender=Photo, dispatch_uid='bio.receivers')
--- a/bio/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/bio/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -2,10 +2,11 @@ from haystack import indexes from bio.models import UserProfile +from custom_search.fields import MaxTermSizeCharField class UserProfileIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) author = indexes.CharField(model_attr='user') def get_model(self):
--- a/bio/signals.py Wed May 13 20:27:17 2015 -0500 +++ b/bio/signals.py Wed May 13 20:29:23 2015 -0500 @@ -1,98 +1,8 @@ """ -Signal handlers & signals for the bio application. - +Signals for the bio application """ -from django.db.models.signals import post_save -from django.contrib.auth.models import User import django.dispatch -from donations.models import Donation -from weblinks.models import Link -from downloads.models import Download -from news.models import Story -from potd.models import Photo - - -def on_user_save(sender, **kwargs): - """ - This signal handler ensures that every User has a corresonding - UserProfile. It is called after User instance is saved. It creates - a UserProfile for the User if the created argument is True. - - """ - created = kwargs['created'] - if created: - user = kwargs['instance'] - profile = UserProfile() - profile.user = user - profile.save() - - -def on_donation_save(sender, **kwargs): - """ - This function is called after a Donation is saved. - If the Donation was newly created and not anonymous, - award the user a contributor pin. - - """ - if kwargs['created']: - donation = kwargs['instance'] - if not donation.is_anonymous and donation.user: - bio.badges.award_badge(bio.badges.CONTRIBUTOR_PIN, donation.user) - - -def on_link_save(sender, **kwargs): - """ - This function is called after a Link is saved. If the Link was newly - created, award the user a link pin. - - """ - if kwargs['created']: - link = kwargs['instance'] - bio.badges.award_badge(bio.badges.LINK_PIN, link.user) - - -def on_download_save(sender, **kwargs): - """ - This function is called after a Download is saved. If the Download was - newly created, award the user a download pin. - - """ - if kwargs['created']: - download = kwargs['instance'] - bio.badges.award_badge(bio.badges.DOWNLOAD_PIN, download.user) - - -def on_story_save(sender, **kwargs): - """ - This function is called after a Story is saved. If the Story was - newly created, award the user a news pin. - - """ - if kwargs['created']: - story = kwargs['instance'] - bio.badges.award_badge(bio.badges.NEWS_PIN, story.submitter) - - -def on_photo_save(sender, **kwargs): - """ - This function is called after a Photo is saved. If the Photo was - newly created, award the user a POTD pin. - - """ - if kwargs['created']: - photo = kwargs['instance'] - bio.badges.award_badge(bio.badges.POTD_PIN, photo.user) - - -post_save.connect(on_user_save, sender=User, dispatch_uid='bio.signals') -post_save.connect(on_donation_save, sender=Donation, dispatch_uid='bio.signals') -post_save.connect(on_link_save, sender=Link, dispatch_uid='bio.signals') -post_save.connect(on_download_save, sender=Download, dispatch_uid='bio.signals') -post_save.connect(on_story_save, sender=Story, dispatch_uid='bio.signals') -post_save.connect(on_photo_save, sender=Photo, dispatch_uid='bio.signals') - -# Signals for the bio application # # This signal is sent whenever a profile has had its textual content updated. # The provided arguments to the receiver function are: @@ -107,8 +17,3 @@ """ profile_content_update.send_robust(profile) - - -# To avoid circular imports -import bio.badges -from bio.models import UserProfile
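bio/signals.py is now reduced to the custom profile_content_update signal and its notify helper; the model signal handlers moved to bio/receivers.py above. For reference, a listener attaches to the custom signal like any django.dispatch signal; the handler below is a made-up illustration (the real consumer in this merge is QueuedSignalProcessor.enqueue_profile in custom_search/receivers.py):

    from bio.signals import profile_content_update

    def on_profile_update(sender, **kwargs):
        # sender is the UserProfile instance whose textual content changed
        pass

    profile_content_update.connect(on_profile_update,
                                   dispatch_uid='example.profile_listener')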
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/bio/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,168 @@ +"""Tests for the bio app's signal handlers.""" + +from collections import namedtuple + +from django.contrib.auth.models import User +from django.db.models.signals import post_save +from django.test import TestCase + +import bio.badges +from bio.models import Badge +from bio.models import BadgeOwnership +from bio.models import UserProfile +import custom_search.receivers +from donations.models import Donation +from downloads.models import Download +import downloads.receivers +from news.models import Story +from potd.models import Photo +import potd.receivers +from weblinks.models import Link +import weblinks.receivers + + +FakeDonation = namedtuple('FakeDonation', ['is_anonymous', 'user']) +FakeUserObject = namedtuple('FakeUserObject', ['user']) +FakeStory = namedtuple('FakeStory', ['submitter']) + + +class ReceiverTestCase(TestCase): + + fixtures = ['badges.json'] + + def setUp(self): + self.user = User.objects.create_user('user', 'user@example.com', 'pw') + + # Don't let our custom search signal handler class catch any of the + # signals we are throwing here. + custom_search.receivers.signal_processor.teardown() + + # Don't let these signal handlers fire either + post_save.disconnect(sender=Link, dispatch_uid='weblinks.receivers') + post_save.disconnect(sender=Download, dispatch_uid='downloads.receivers') + post_save.disconnect(sender=Photo, dispatch_uid='potd.receivers') + + def tearDown(self): + custom_search.receivers.signal_processor.setup() + post_save.connect(weblinks.receivers.on_link_save, sender=Link, + dispatch_uid='weblinks.receivers') + post_save.connect(downloads.receivers.on_download_save, sender=Download, + dispatch_uid='downloads.receivers') + post_save.connect(potd.receivers.on_photo_save, sender=Photo, + dispatch_uid='potd.receivers') + + def test_profile_creation(self): + profile = UserProfile.objects.get(user=self.user) + self.assertEqual(self.user.profile, profile) + + def test_donation_created(self): + donation = FakeDonation(False, self.user) + post_save.send(sender=Donation, created=True, instance=donation) + + badge = Badge.objects.get(numeric_id=bio.badges.CONTRIBUTOR_PIN) + ownership = BadgeOwnership.objects.get(badge=badge, profile=self.user.profile) + self.assertEqual(ownership.count, 1) + + def test_donation_updated(self): + donation = FakeDonation(False, self.user) + post_save.send(sender=Donation, created=False, instance=donation) + + badge = Badge.objects.get(numeric_id=bio.badges.CONTRIBUTOR_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_donation_anonymous(self): + donation = FakeDonation(True, self.user) + post_save.send(sender=Donation, created=False, instance=donation) + + badge = Badge.objects.get(numeric_id=bio.badges.CONTRIBUTOR_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_donation_no_user(self): + donation = FakeDonation(False, None) + post_save.send(sender=Donation, created=False, instance=donation) + + badge = Badge.objects.get(numeric_id=bio.badges.CONTRIBUTOR_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_donation_anon_and_no_user(self): + donation = FakeDonation(True, None) + post_save.send(sender=Donation, created=False, instance=donation) + + 
badge = Badge.objects.get(numeric_id=bio.badges.CONTRIBUTOR_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_link_created(self): + link = FakeUserObject(self.user) + post_save.send(sender=Link, created=True, instance=link) + + badge = Badge.objects.get(numeric_id=bio.badges.LINK_PIN) + ownership = BadgeOwnership.objects.get(badge=badge, profile=self.user.profile) + self.assertEqual(ownership.count, 1) + + def test_link_updated(self): + link = FakeUserObject(self.user) + post_save.send(sender=Link, created=False, instance=link) + + badge = Badge.objects.get(numeric_id=bio.badges.LINK_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_download_created(self): + dl = FakeUserObject(self.user) + post_save.send(sender=Download, created=True, instance=dl) + + badge = Badge.objects.get(numeric_id=bio.badges.DOWNLOAD_PIN) + ownership = BadgeOwnership.objects.get(badge=badge, profile=self.user.profile) + self.assertEqual(ownership.count, 1) + + def test_download_updated(self): + dl = FakeUserObject(self.user) + post_save.send(sender=Download, created=False, instance=dl) + + badge = Badge.objects.get(numeric_id=bio.badges.DOWNLOAD_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_story_created(self): + story = FakeStory(self.user) + post_save.send(sender=Story, created=True, instance=story) + + badge = Badge.objects.get(numeric_id=bio.badges.NEWS_PIN) + ownership = BadgeOwnership.objects.get(badge=badge, profile=self.user.profile) + self.assertEqual(ownership.count, 1) + + def test_story_updated(self): + story = FakeStory(self.user) + post_save.send(sender=Story, created=False, instance=story) + + badge = Badge.objects.get(numeric_id=bio.badges.NEWS_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile) + + def test_photo_created(self): + photo = FakeUserObject(self.user) + post_save.send(sender=Photo, created=True, instance=photo) + + badge = Badge.objects.get(numeric_id=bio.badges.POTD_PIN) + ownership = BadgeOwnership.objects.get(badge=badge, profile=self.user.profile) + self.assertEqual(ownership.count, 1) + + def test_photo_updated(self): + photo = FakeUserObject(self.user) + post_save.send(sender=Photo, created=False, instance=photo) + + badge = Badge.objects.get(numeric_id=bio.badges.POTD_PIN) + self.assertRaises(BadgeOwnership.DoesNotExist, + BadgeOwnership.objects.get, + badge=badge, profile=self.user.profile)
--- a/contests/models.py Wed May 13 20:27:17 2015 -0500 +++ b/contests/models.py Wed May 13 20:29:23 2015 -0500 @@ -30,7 +30,7 @@ title = models.CharField(max_length=64) slug = models.SlugField(max_length=64) description = models.TextField() - is_public = models.BooleanField(db_index=True) + is_public = models.BooleanField(default=False, db_index=True) creation_date = models.DateTimeField(blank=True) end_date = models.DateTimeField() contestants = models.ManyToManyField(User, related_name='contests',
--- a/core/markup.py Wed May 13 20:27:17 2015 -0500 +++ b/core/markup.py Wed May 13 20:29:23 2015 -0500 @@ -5,7 +5,7 @@ import markdown as _markdown from django.utils.encoding import force_unicode -from smiley import SmilifyMarkdown +from smiley.utils import SmilifyMarkdown from core.mdexts.urlize import UrlizeExtension from core.mdexts.deleted import DelExtension from core.html import clean_html
--- a/core/tests/test_ssl_images.py Wed May 13 20:27:17 2015 -0500 +++ b/core/tests/test_ssl_images.py Wed May 13 20:29:23 2015 -0500 @@ -123,8 +123,6 @@ self.assertEqual(expected, result) @mock.patch('core.management.commands.ssl_images.save_image_to_cloud') - @mock.patch('core.management.commands.ssl_images.check_https_availability', - new=lambda r: None) def test_simple_replacement(self, upload_mock): old_src = 'http://example.com/images/my_image.jpg' new_src = 'https://cloud.com/ABCDEF.jpg' @@ -138,11 +136,9 @@ upload_mock.return_value = new_src result = process_post(test_str) self.assertEqual(expected, result) - upload_mock.assert_called_once_with(old_src) + upload_mock.assert_called_once_with(urlparse(old_src)) @mock.patch('core.management.commands.ssl_images.save_image_to_cloud') - @mock.patch('core.management.commands.ssl_images.check_https_availability', - new=lambda r: None) def test_multiple_replacement(self, upload_mock): old_src = [ 'http://example.com/images/my_image.jpg', @@ -169,12 +165,10 @@ upload_mock.side_effect = new_src result = process_post(test_str) self.assertEqual(expected, result) - expected_args = [mock.call(c) for c in old_src] + expected_args = [mock.call(urlparse(c)) for c in old_src] self.assertEqual(upload_mock.call_args_list, expected_args) @mock.patch('core.management.commands.ssl_images.save_image_to_cloud') - @mock.patch('core.management.commands.ssl_images.check_https_availability', - new=lambda r: None) def test_multiple_replacement_2(self, upload_mock): old_src = [ 'http://example.com/images/my_image.jpg', @@ -203,11 +197,9 @@ upload_mock.side_effect = new_src result = process_post(test_str) self.assertEqual(expected, result) - upload_mock.assert_called_once_with(old_src[0]) + upload_mock.assert_called_once_with(urlparse(old_src[0])) @mock.patch('core.management.commands.ssl_images.save_image_to_cloud') - @mock.patch('core.management.commands.ssl_images.check_https_availability', - new=lambda r: None) def test_caching(self, upload_mock): old_src = [ 'http://example.com/images/my_image.jpg', @@ -234,39 +226,9 @@ upload_mock.side_effect = new_src result = process_post(test_str) self.assertEqual(expected, result) - expected_args = [mock.call(c) for c in old_src[:2]] + expected_args = [mock.call(urlparse(c)) for c in old_src[:2]] self.assertEqual(upload_mock.call_args_list, expected_args) - @mock.patch('core.management.commands.ssl_images.check_https_availability') - def test_https_availability(self, check_https_mock): - old_src = [ - 'http://example.com/images/my_image.jpg', - 'http://example.com/static/wow.gif', - 'http://example.com/images/another_image.jpg', - ] - new_src = [ - 'https://example.com/images/my_image.jpg', - 'https://example.com/static/wow.gif', - 'https://example.com/images/another_image.jpg', - ] - - template = """Here is a really cool http: based image: - ![flyer]({}) - Cool, right? - Another one: ![pic]({}) - And finally - ![an image]({}) - """ - - test_str = template.format(*old_src) - expected = template.format(*new_src) - - check_https_mock.side_effect = new_src - result = process_post(test_str) - self.assertEqual(expected, result) - expected_args = [mock.call(urlparse(c)) for c in old_src] - self.assertEqual(check_https_mock.call_args_list, expected_args) - class HtmlCheckTestCase(unittest.TestCase):
--- a/custom_search/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/custom_search/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,1 @@ +default_app_config = 'custom_search.apps.CustomSearchConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/custom_search/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,9 @@ +from django.apps import AppConfig + + +class CustomSearchConfig(AppConfig): + name = 'custom_search' + verbose_name = 'Custom Search' + + def ready(self): + import custom_search.receivers
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/custom_search/fields.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,29 @@ +"""Custom Haystack SearchFields.""" + +import haystack.fields + + +class MaxTermSizeCharField(haystack.fields.CharField): + """A CharField that discards large terms when preparing the search index. + + Some backends (e.g. Xapian) throw errors when terms are bigger than some + limit. This field omits the terms over a limit when preparing the data for + the search index. + + The keyword argument max_term_size sets the maximum size of a whitespace + delimited word/term. Terms over this size are not indexed. The default value + is 64. + """ + DEFAULT_MAX_TERM_SIZE = 64 + + def __init__(self, *args, **kwargs): + self.max_term_size = kwargs.pop('max_term_size', self.DEFAULT_MAX_TERM_SIZE) + super(MaxTermSizeCharField, self).__init__(*args, **kwargs) + + def prepare(self, obj): + text = super(MaxTermSizeCharField, self).prepare(obj) + if text is None or self.max_term_size is None: + return text + + terms = (term for term in text.split() if len(term) <= self.max_term_size) + return u' '.join(terms)
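MaxTermSizeCharField is used as a drop-in replacement for indexes.CharField in the search_indexes.py hunks elsewhere in this changeset (bio, downloads, forums, news, podcast). The filtering its prepare() applies amounts to the following, shown standalone with a made-up string:

    max_term_size = 64  # MaxTermSizeCharField.DEFAULT_MAX_TERM_SIZE
    text = u'surf guitar plus one oversizedtermthatrunswellpastsixtyfourcharacters_0123456789012345678901234567890123456789'
    filtered = u' '.join(term for term in text.split() if len(term) <= max_term_size)
    # filtered == u'surf guitar plus one'; the oversized term never reaches
    # the search backend (e.g. Xapian), which errors out on very long terms.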
--- a/custom_search/forms.py Wed May 13 20:27:17 2015 -0500 +++ b/custom_search/forms.py Wed May 13 20:29:23 2015 -0500 @@ -65,6 +65,12 @@ return self.cleaned_data + def clean_exact(self): + exact_field = self.cleaned_data['exact'] + if "'" in exact_field or '"' in exact_field: + raise forms.ValidationError("Quotes are not needed in this field") + return exact_field + def search(self): if not self.is_valid(): return self.no_query_found() @@ -83,24 +89,25 @@ self.cleaned_data['models'], username) - sqs = self.searchqueryset - # Note that in Haystack 2.x content is untrusted and is automatically # auto-escaped for us. # - # Filter on the q terms; these should be and'ed together: - terms = self.cleaned_data['q'].split() - for term in terms: - sqs = sqs.filter(content=term) + # Gather regular search terms + terms = ' '.join(self.cleaned_data['q'].split()) # Exact words or phrases: - if self.cleaned_data['exact']: - sqs = sqs.filter(content__exact=self.cleaned_data['exact']) + exact = self.cleaned_data['exact'].strip() + if exact: + exact = '"{}"'.format(exact) # Exclude terms: - terms = self.cleaned_data['exclude'].split() - for term in terms: - sqs = sqs.exclude(content=term) + exclude = ["-{}".format(term) for term in self.cleaned_data['exclude'].split()] + exclude = ' '.join(exclude) + + query = ' '.join([terms, exact, exclude]).strip() + logger.debug("auto_query: %s", query) + + sqs = self.searchqueryset.auto_query(query) if self.load_all: sqs = sqs.load_all()
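The rewritten search() no longer chains a filter() call per term and an exclude() per excluded word; it builds a single query string, quoting the exact phrase and prefixing excluded terms with '-', and hands it to SearchQuerySet.auto_query(). With made-up field values the assembled string looks like this:

    q = 'surf guitar'           # cleaned_data['q']
    exact = 'dick dale'         # cleaned_data['exact']; clean_exact() now rejects quotes
    exclude = 'ukulele banjo'   # cleaned_data['exclude']

    terms = ' '.join(q.split())
    exact_part = '"{}"'.format(exact.strip()) if exact.strip() else ''
    exclude_part = ' '.join('-{}'.format(term) for term in exclude.split())
    query = ' '.join([terms, exact_part, exclude_part]).strip()
    # query == 'surf guitar "dick dale" -ukulele -banjo'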
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/custom_search/receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,134 @@ +"""This module contains a custom Haystack signal processing class to update the +search index in realtime. We update our search index by enqueuing edits and +deletes into a queue for batch processing. Our class ensures we only enqueue +content that should be in the search index. + +""" +from django.db.models import signals +import queued_search.signals +import haystack + +from bio.signals import profile_content_update +from forums.signals import topic_content_update, post_content_update + +import ygroup.models +from weblinks.models import Link +from podcast.models import Item +from news.models import Story +from downloads.models import Download +from forums.models import Forum, Topic, Post +from bio.models import UserProfile + + +UID = 'custom_search.signals' + + +class QueuedSignalProcessor(queued_search.signals.QueuedSignalProcessor): + """ + This customized version of queued_search's QueuedSignalProcessor + conditionally enqueues items to be indexed. + + """ + def __init__(self, *args, **kwargs): + + # We assume that it is okay to attempt to delete a model from the search + # index even if the model object is not in the index. In other words, + # attempting to delete an object from the index will not cause any + # errors if it is not in the index. Thus if we see an object that has an + # 'is_public' attribute, and it is false, we can safely enqueue a delete + # in case the 'is_public' attribute just went from True to False. We + # have no way of knowing that, it could have been False all along, but we + # just try the delete in case to be safe. + + # To make the code easier to read, use a table to drive our signal + # connecting and disconnecting: + self.signal_chain = [ + # Yahoo Group posts are always updated: + (signals.post_save, ygroup.models.Post, self.enqueue_save), + (signals.post_delete, ygroup.models.Post, self.enqueue_delete), + + # Weblink Links are updated if they are public: + (signals.post_save, Link, self.enqueue_public_save), + (signals.post_delete, Link, self.enqueue_delete), + + # Podcast Items are always updated: + (signals.post_save, Item, self.enqueue_save), + (signals.post_delete, Item, self.enqueue_delete), + + # News Stories are always updated: + (signals.post_save, Story, self.enqueue_save), + (signals.post_delete, Story, self.enqueue_delete), + + # Downloads are updated if they are public: + (signals.post_save, Download, self.enqueue_public_save), + (signals.post_delete, Download, self.enqueue_delete), + + # Forum Topics are updated if they belong to a public forum: + (topic_content_update, None, self.enqueue_topic_save), + (signals.post_delete, Topic, self.enqueue_delete), + + # Forum Posts are updated if they belong to a public forum: + (post_content_update, None, self.enqueue_post_save), + (signals.post_delete, Post, self.enqueue_delete), + + # UserProfiles are updated when we receive a special signal: + (profile_content_update, None, self.enqueue_profile), + (signals.post_delete, UserProfile, self.enqueue_delete), + ] + + super(QueuedSignalProcessor, self).__init__(*args, **kwargs) + + def setup(self): + """We override setup() so we can attach signal handlers to only the + models we search on. In some cases we have custom signals to tell us + when to update the search index. 
+ + """ + for signal, sender, receiver in self.signal_chain: + signal.connect(receiver, sender=sender, dispatch_uid=UID) + + def teardown(self): + """Disconnect all signals we previously connected.""" + for signal, sender, receiver in self.signal_chain: + signal.disconnect(receiver, sender=sender, dispatch_uid=UID) + + def enqueue_public_save(self, sender, instance, **kwargs): + """Index only if the instance is_public. + + If not, enqueue a delete just in case the is_public flag got flipped + from True to False. + + """ + if instance.is_public: + self.enqueue_save(sender, instance, **kwargs) + else: + self.enqueue_delete(sender, instance, **kwargs) + + def enqueue_topic_save(self, sender, **kwargs): + """Enqueue only if the topic instance belongs to a public forum.""" + if sender.forum.id in Forum.objects.public_forum_ids(): + self.enqueue_save(Topic, sender, **kwargs) + + def enqueue_post_save(self, sender, **kwargs): + """Enqueue only if the post instance belongs to a public forum.""" + if sender.topic.forum.id in Forum.objects.public_forum_ids(): + self.enqueue_save(Post, sender, **kwargs) + + def enqueue_profile(self, sender, **kwargs): + """Forward the user profile instance on unconditionally.""" + self.enqueue_save(UserProfile, sender, **kwargs) + + +# Starting with Django 1.7, we'd see Django generate warnings if we defined +# a HAYSTACK_SIGNAL_PROCESSOR in our settings that referenced the class above. +# This is because Haystack creates an instance of our signal processor class +# (defined above) at import time, and thus imports this module very early in the +# application startup sequence. Warnings are then generated when this module +# imports our models, some of whose applications have not been imported yet. +# This problem will presumably go away when Haystack can fully support Django +# 1.7.x and implements an AppConfig with a ready() method. Until then, we don't +# use Haystack's signal processor object; we'll just create one here. This +# module will be imported when our custom_search app's ready() method runs. + +signal_processor = QueuedSignalProcessor(haystack.connections, + haystack.connection_router)
--- a/custom_search/signals.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,118 +0,0 @@ -"""This module contains a custom Haystack signal processing class to update the -search index in realtime. We update our search index by enqueuing edits and -deletes into a queue for batch processing. Our class ensures we only enqueue -content that should be in the search index. - -""" -from django.db.models import signals -import queued_search.signals - -from bio.signals import profile_content_update -from forums.signals import topic_content_update, post_content_update - -import ygroup.models -from weblinks.models import Link -from podcast.models import Item -from news.models import Story -from downloads.models import Download -from forums.models import Forum, Topic, Post -from bio.models import UserProfile - - -UID = 'custom_search.signals' - - -class QueuedSignalProcessor(queued_search.signals.QueuedSignalProcessor): - """ - This customized version of queued_search's QueuedSignalProcessor - conditionally enqueues items to be indexed. - - """ - def __init__(self, *args, **kwargs): - - # We assume that it is okay to attempt to delete a model from the search - # index even if the model object is not in the index. In other words, - # attempting to delete an object from the index will not cause any - # errors if it is not in the index. Thus if we see an object that has an - # 'is_public' attribute, and it is false, we can safely enqueue a delete - # in case the 'is_public' attribute just went from True to False. We - # have no way of knowing that, it could have been False all along, but we - # just try the delete in case to be safe. - - # To make the code easier to read, use a table to drive our signal - # connecting and disconnecting: - self.signal_chain = [ - # Yahoo Group posts are always updated: - (signals.post_save, ygroup.models.Post, self.enqueue_save), - (signals.post_delete, ygroup.models.Post, self.enqueue_delete), - - # Weblink Links are updated if they are public: - (signals.post_save, Link, self.enqueue_public_save), - (signals.post_delete, Link, self.enqueue_delete), - - # Podcast Items are always updated: - (signals.post_save, Item, self.enqueue_save), - (signals.post_delete, Item, self.enqueue_delete), - - # News Stories are always updated: - (signals.post_save, Story, self.enqueue_save), - (signals.post_delete, Story, self.enqueue_delete), - - # Downloads are updated if they are public: - (signals.post_save, Download, self.enqueue_public_save), - (signals.post_delete, Download, self.enqueue_delete), - - # Forum Topics are updated if they belong to a public forum: - (topic_content_update, None, self.enqueue_topic_save), - (signals.post_delete, Topic, self.enqueue_delete), - - # Forum Posts are updated if they belong to a public forum: - (post_content_update, None, self.enqueue_post_save), - (signals.post_delete, Post, self.enqueue_delete), - - # UserProfiles are updated when we receive a special signal: - (profile_content_update, None, self.enqueue_profile), - (signals.post_delete, UserProfile, self.enqueue_delete), - ] - - super(QueuedSignalProcessor, self).__init__(*args, **kwargs) - - def setup(self): - """We override setup() so we can attach signal handlers to only the - models we search on. In some cases we have custom signals to tell us - when to update the search index. 
- - """ - for signal, sender, receiver in self.signal_chain: - signal.connect(receiver, sender=sender, dispatch_uid=UID) - - def teardown(self): - """Disconnect all signals we previously connected.""" - for signal, sender, receiver in self.signal_chain: - signal.disconnect(receiver, sender=sender, dispatch_uid=UID) - - def enqueue_public_save(self, sender, instance, **kwargs): - """Index only if the instance is_public. - - If not, enqueue a delete just in case the is_public flag got flipped - from True to False. - - """ - if instance.is_public: - self.enqueue_save(sender, instance, **kwargs) - else: - self.enqueue_delete(sender, instance, **kwargs) - - def enqueue_topic_save(self, sender, **kwargs): - """Enqueue only if the topic instance belongs to a public forum.""" - if sender.forum.id in Forum.objects.public_forum_ids(): - self.enqueue_save(Topic, sender, **kwargs) - - def enqueue_post_save(self, sender, **kwargs): - """Enqueue only if the post instance belongs to a public forum.""" - if sender.topic.forum.id in Forum.objects.public_forum_ids(): - self.enqueue_save(Post, sender, **kwargs) - - def enqueue_profile(self, sender, **kwargs): - """Forward the user profile instance on unconditionally.""" - self.enqueue_save(UserProfile, sender, **kwargs)
--- a/donations/models.py Wed May 13 20:27:17 2015 -0500 +++ b/donations/models.py Wed May 13 20:29:23 2015 -0500 @@ -96,7 +96,7 @@ """Model to represent a donation to the website.""" user = models.ForeignKey(User, null=True, blank=True) - is_anonymous = models.BooleanField() + is_anonymous = models.BooleanField(default=False) test_ipn = models.BooleanField(default=False, verbose_name="Test IPN") txn_id = models.CharField(max_length=20, verbose_name="Txn ID") txn_type = models.CharField(max_length=64)
--- a/downloads/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/downloads/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,1 +1,1 @@ -import signals +default_app_config = 'downloads.apps.DownloadsConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/downloads/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,8 @@ +from django.apps import AppConfig + + +class DownloadsConfig(AppConfig): + name = 'downloads' + + def ready(self): + import downloads.receivers
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/downloads/receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,63 @@ +"""Signal handlers for the downloads application. + +We use signals to compute the denormalized category counts whenever a download +is saved. + +""" +from django.db.models.signals import post_save +from django.db.models.signals import post_delete + +from downloads.models import Category, Download, PendingDownload + + +def on_download_save(sender, **kwargs): + """This function updates the count field for all categories. + It is called whenever a download is saved via a signal. + """ + if kwargs['created']: + # we only have to update the parent category + download = kwargs['instance'] + cat = download.category + cat.count = Download.public_objects.filter(category=cat).count() + cat.save() + else: + # update all categories just to be safe (an existing download could + # have been moved from one category to another + cats = Category.objects.all() + for cat in cats: + cat.count = Download.public_objects.filter(category=cat).count() + cat.save() + + +def on_download_delete(sender, **kwargs): + """This function updates the count field for the download's parent + category. It is called when a download is deleted via a signal. + + We now delete the uploaded file when the download is deleted. + """ + # update the parent category + download = kwargs['instance'] + cat = download.category + cat.count = Download.public_objects.filter(category=cat).count() + cat.save() + + # delete the actual file + if download.file: + download.file.delete(save=False) + + +def on_pending_download_delete(sender, **kwargs): + """Delete the uploaded file if it exists.""" + + download = kwargs['instance'] + # delete the actual file + if download.file: + download.file.delete(save=False) + + +post_save.connect(on_download_save, sender=Download, + dispatch_uid='downloads.receivers') +post_delete.connect(on_download_delete, sender=Download, + dispatch_uid='downloads.receivers') +post_delete.connect(on_pending_download_delete, sender=PendingDownload, + dispatch_uid='downloads.receivers')
--- a/downloads/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/downloads/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -1,11 +1,12 @@ """Haystack search index for the downloads application.""" from haystack import indexes +from custom_search.fields import MaxTermSizeCharField from downloads.models import Download class DownloadIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) author = indexes.CharField(model_attr='user') pub_date = indexes.DateTimeField(model_attr='date_added')
--- a/downloads/signals.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,60 +0,0 @@ -"""Signals for the downloads application. -We use signals to compute the denormalized category counts whenever a download -is saved.""" -from django.db.models.signals import post_save -from django.db.models.signals import post_delete - -from downloads.models import Category, Download, PendingDownload - - -def on_download_save(sender, **kwargs): - """This function updates the count field for all categories. - It is called whenever a download is saved via a signal. - """ - if kwargs['created']: - # we only have to update the parent category - download = kwargs['instance'] - cat = download.category - cat.count = Download.public_objects.filter(category=cat).count() - cat.save() - else: - # update all categories just to be safe (an existing download could - # have been moved from one category to another - cats = Category.objects.all() - for cat in cats: - cat.count = Download.public_objects.filter(category=cat).count() - cat.save() - - -def on_download_delete(sender, **kwargs): - """This function updates the count field for the download's parent - category. It is called when a download is deleted via a signal. - - We now delete the uploaded file when the download is deleted. - """ - # update the parent category - download = kwargs['instance'] - cat = download.category - cat.count = Download.public_objects.filter(category=cat).count() - cat.save() - - # delete the actual file - if download.file: - download.file.delete(save=False) - - -def on_pending_download_delete(sender, **kwargs): - """Delete the uploaded file if it exists.""" - - download = kwargs['instance'] - # delete the actual file - if download.file: - download.file.delete(save=False) - - -post_save.connect(on_download_save, sender=Download, - dispatch_uid='downloads.signals') -post_delete.connect(on_download_delete, sender=Download, - dispatch_uid='downloads.signals') -post_delete.connect(on_pending_download_delete, sender=PendingDownload, - dispatch_uid='downloads.signals')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/downloads/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,71 @@ +"""Tests for the downloads app signal handlers.""" + +from django.contrib.auth.models import User +from django.test import TestCase + +from mock import Mock + +import custom_search.receivers + +from downloads.models import Category +from downloads.models import Download +from downloads.models import PendingDownload + + +class ReceiverTestCase(TestCase): + + fixtures = ['downloads_categories.json'] + + def setUp(self): + self.user = User.objects.create_user('user', 'user@example.com', 'pw') + + # Don't let our custom search signal handler class catch any of the + # signals we are throwing here. + custom_search.receivers.signal_processor.teardown() + + def tearDown(self): + custom_search.receivers.signal_processor.setup() + + def test_download_signal_handlers(self): + + category = Category.objects.get(pk=1) + dl = Download(category=category, + title='Title', + description='Cool stuff', + is_public=True, + user=self.user, + ip_address='127.0.0.1') + dl.save() + + category = Category.objects.get(pk=1) + self.assertEqual(1, category.count) + + category2 = Category.objects.get(pk=4) + dl.category = category2 + dl.save() + + category = Category.objects.get(pk=1) + self.assertEqual(0, category.count) + category2 = Category.objects.get(pk=4) + self.assertEqual(1, category2.count) + + dl.delete() + category2 = Category.objects.get(pk=4) + self.assertEqual(0, category2.count) + + def test_pending_download_signal_handlers(self): + + category = Category.objects.get(pk=1) + dl = PendingDownload( + category=category, + title='Title', + description='Cool stuff', + user=self.user, + ip_address='127.0.0.1') + dl.save() + + dl.file = Mock() + dl.delete() + + dl.file.delete.assert_called_with(save=False) +
--- a/forums/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/forums/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,2 +1,1 @@ -import signals -import latest +default_app_config = 'forums.apps.ForumsConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/forums/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,9 @@ +from django.apps import AppConfig + + +class ForumsConfig(AppConfig): + name = 'forums' + + def ready(self): + import forums.receivers + import forums.latest
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/forums/receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,60 @@ +""" +Signal handlers for the forums application. + +""" +from django.db.models.signals import post_save +from django.db.models.signals import post_delete + +from forums.models import Forum, Topic, Post +import forums.latest + + +def on_topic_save(sender, **kwargs): + if kwargs['created']: + topic = kwargs['instance'] + topic.forum.topic_count_update() + topic.forum.save() + + +def on_topic_delete(sender, **kwargs): + topic = kwargs['instance'] + topic.forum.topic_count_update() + topic.forum.save() + forums.latest.notify_topic_delete(topic) + + +def on_post_save(sender, **kwargs): + if kwargs['created']: + post = kwargs['instance'] + + # update the topic + post.topic.post_count_update() + post.topic.save() + + # update the forum + post.topic.forum.post_count_update() + post.topic.forum.save() + + +def on_post_delete(sender, **kwargs): + post = kwargs['instance'] + + # update the topic + try: + post.topic.post_count_update() + post.topic.save() + except Topic.DoesNotExist: + pass + else: + # update the forum + try: + post.topic.forum.post_count_update() + post.topic.forum.save() + except Forum.DoesNotExist: + pass + + +post_save.connect(on_topic_save, sender=Topic, dispatch_uid='forums.receivers') +post_delete.connect(on_topic_delete, sender=Topic, dispatch_uid='forums.receivers') +post_save.connect(on_post_save, sender=Post, dispatch_uid='forums.receivers') +post_delete.connect(on_post_delete, sender=Post, dispatch_uid='forums.receivers')
--- a/forums/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/forums/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -1,6 +1,7 @@ """Haystack search index for the weblinks application.""" from haystack import indexes +from custom_search.fields import MaxTermSizeCharField from forums.models import Forum, Topic, Post @@ -20,7 +21,7 @@ class PostIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) author = indexes.CharField(model_attr='user') pub_date = indexes.DateTimeField(model_attr='creation_date')
--- a/forums/signals.py Wed May 13 20:27:17 2015 -0500 +++ b/forums/signals.py Wed May 13 20:29:23 2015 -0500 @@ -1,68 +1,10 @@ """ -Signal handlers & signals for the forums application. +Signals for the forums application. """ -from django.db.models.signals import post_save -from django.db.models.signals import post_delete import django.dispatch -from forums.models import Forum, Topic, Post - -def on_topic_save(sender, **kwargs): - if kwargs['created']: - topic = kwargs['instance'] - topic.forum.topic_count_update() - topic.forum.save() - - -def on_topic_delete(sender, **kwargs): - topic = kwargs['instance'] - topic.forum.topic_count_update() - topic.forum.save() - forums.latest.notify_topic_delete(topic) - - -def on_post_save(sender, **kwargs): - if kwargs['created']: - post = kwargs['instance'] - - # update the topic - post.topic.post_count_update() - post.topic.save() - - # update the forum - post.topic.forum.post_count_update() - post.topic.forum.save() - - -def on_post_delete(sender, **kwargs): - post = kwargs['instance'] - - # update the topic - try: - post.topic.post_count_update() - post.topic.save() - except Topic.DoesNotExist: - pass - else: - # update the forum - try: - post.topic.forum.post_count_update() - post.topic.forum.save() - except Forum.DoesNotExist: - pass - - -post_save.connect(on_topic_save, sender=Topic, dispatch_uid='forums.signals') -post_delete.connect(on_topic_delete, sender=Topic, dispatch_uid='forums.signals') - -post_save.connect(on_post_save, sender=Post, dispatch_uid='forums.signals') -post_delete.connect(on_post_delete, sender=Post, dispatch_uid='forums.signals') - - -# Signals for the forums application. -# # This signal is sent when a topic has had its textual content (title) changed. # The provided arguments are: # sender - the topic model instance @@ -108,7 +50,3 @@ """ post_content_update.send_robust(post, created=False) - - -# Avoid circular imports -import forums.latest
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/forums/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,63 @@ +"""Tests for the forum app's signal handlers.""" + +from django.contrib.auth.models import User +from django.test import TestCase + +from mock import patch + +import custom_search.receivers + +from forums.models import Forum +from forums.models import Post +from forums.models import Topic + + +class ReceiverTestCase(TestCase): + + fixtures = ['forums.json'] + + def setUp(self): + self.user = User.objects.create_user('user', 'user@example.com', 'pw') + + # Don't let our custom search signal handler class catch any of the + # signals we are throwing here. + custom_search.receivers.signal_processor.teardown() + + def tearDown(self): + custom_search.receivers.signal_processor.setup() + + @patch('forums.latest.notify_topic_delete') + def test_signal_handlers(self, mock_latest): + forum = Forum.objects.get(pk=2) + topic = Topic(forum=forum, + name='Test Topic', + user=self.user) + topic.save() + forum = Forum.objects.get(pk=2) + self.assertEqual(1, forum.topic_count) + + post = Post(topic=topic, user=self.user, body='test') + post.save() + + topic = Topic.objects.get(pk=topic.pk) + self.assertEqual(topic.post_count, 1) + forum = Forum.objects.get(pk=2) + self.assertEqual(forum.post_count, 1) + + # To prevent cascading deletes + topic.last_post_pre_delete() + topic.save() + forum.last_post_pre_delete() + forum.save() + + post.delete() + topic = Topic.objects.get(pk=topic.pk) + self.assertEqual(topic.post_count, 0) + forum = Forum.objects.get(pk=2) + self.assertEqual(forum.post_count, 0) + + topic.delete() + forum = Forum.objects.get(pk=2) + self.assertEqual(0, forum.topic_count) + + mock_latest.assert_called_once_with(topic)
--- a/gcalendar/calendar.py Wed May 13 20:27:17 2015 -0500 +++ b/gcalendar/calendar.py Wed May 13 20:29:23 2015 -0500 @@ -119,9 +119,9 @@ def _make_time(self, date, time=None, tz_name=None): """ - Returns the gdata formatted date/time string given a date, optional time, - and optional time zone name (e.g. 'US/Pacific'). If the time zone name is None, - no time zone info will be added to the string. + Returns the formatted date/time string given a date, optional time, and + optional time zone name (e.g. 'US/Pacific'). If the time zone name is + None, no time zone info will be added to the string. """ if time:
--- a/messages/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/messages/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,1 +1,2 @@ +default_app_config = 'messages.apps.MessagesConfig' MSG_BOX_LIMIT = 30 # hard limit on # of msgs per box
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/messages/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class MessagesConfig(AppConfig): + name = 'messages' + verbose_name = 'Private Messages'
--- a/news/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/news/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -1,11 +1,12 @@ """Haystack search index for the news application.""" from haystack import indexes +from custom_search.fields import MaxTermSizeCharField from news.models import Story class StoryIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) author = indexes.CharField(model_attr='submitter') pub_date = indexes.DateTimeField(model_attr='date_submitted')
--- a/podcast/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/podcast/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -1,11 +1,12 @@ """Haystack search index for the news application.""" from haystack import indexes +from custom_search.fields import MaxTermSizeCharField from podcast.models import Item class ItemIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) author = indexes.CharField(model_attr='author') pub_date = indexes.DateTimeField(model_attr='pubdate')
--- a/potd/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/potd/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,1 +1,1 @@ -import signals +default_app_config = 'potd.apps.PotdConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/potd/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,10 @@ +from django.apps import AppConfig + + +class PotdConfig(AppConfig): + name = 'potd' + verbose_name = 'Photo of the Day' + label = 'potd' + + def ready(self): + import potd.receivers
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/potd/receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,31 @@ +""" +Signal handlers for the potd application. + +""" +from django.db.models.signals import post_save, post_delete + +from potd.models import Photo, Sequence + + +def on_photo_save(sender, **kwargs): + """ + This function is executed when a Photo is saved. It inserts the photo into + the current sequence. + + """ + photo = kwargs['instance'] + Sequence.objects.insert_photo(photo.pk) + + +def on_photo_delete(sender, **kwargs): + """ + This function is executed when a Photo is deleted. It removes the photo from + the current sequence of photos. + + """ + photo = kwargs['instance'] + Sequence.objects.remove_photo(photo.pk) + + +post_save.connect(on_photo_save, sender=Photo, dispatch_uid='potd.receivers') +post_delete.connect(on_photo_delete, sender=Photo, dispatch_uid='potd.receivers')
--- a/potd/signals.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -""" -Signal handlers for the potd application. - -""" -from django.db.models.signals import post_save, post_delete - -from potd.models import Photo, Sequence - - -def on_photo_save(sender, **kwargs): - """ - This function is executed when a Photo is saved. It inserts the photo into - the current sequence. - - """ - photo = kwargs['instance'] - Sequence.objects.insert_photo(photo.pk) - - -def on_photo_delete(sender, **kwargs): - """ - This function is executed when a Photo is deleted. It removes the photo from - the current sequence of photos. - - """ - photo = kwargs['instance'] - Sequence.objects.remove_photo(photo.pk) - - -post_save.connect(on_photo_save, sender=Photo, dispatch_uid='potd.signals') -post_delete.connect(on_photo_delete, sender=Photo, dispatch_uid='potd.signals')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/potd/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,34 @@ +"""Tests for potd signal handlers.""" +from django.contrib.auth.models import User +from django.test import TestCase + +from mock import Mock + +from potd.models import Photo, Sequence, Current + + +class PotdSignalRcvrTestCase(TestCase): + + fixtures = ['potd_test.json'] + + def test_on_photo_save(self): + user = User.objects.get(pk=1) + photo = Photo(photo='/tmp/1.jpg', + caption='caption', + description='desc', + user=user) + photo.generate_thumb = Mock() + photo.save() + + current = Current.objects.get_current_id() + self.assertTrue(current != photo.pk) + + seq = Sequence.objects.get(pk=1) + expected = '1,{},2,3'.format(photo.pk) + self.assertEqual(seq.seq, expected) + + def test_on_photo_delete(self): + photo = Photo.objects.get(pk=2) + photo.delete() + seq = Sequence.objects.get(pk=1) + self.assertEqual(seq.seq, '1,3')
--- a/requirements_dev.txt Wed May 13 20:27:17 2015 -0500 +++ b/requirements_dev.txt Wed May 13 20:29:23 2015 -0500 @@ -1,7 +1,7 @@ -Django==1.6.6 +Django==1.7.7 Markdown==2.5.1 MySQL-python==1.2.5 -django-debug-toolbar==1.0 +django-debug-toolbar==1.3.0 -e git+https://github.com/gremmie/django-elsewhere.git@1203bd331aba4c5d4e702cc4e64d807310f2b591#egg=django_elsewhere-master django-haystack==2.1.0 django-tagging==0.3.1 @@ -28,7 +28,7 @@ ftfy==2.0.1 Pillow==2.7.0 boto==2.13.0 -sqlparse==0.1.10 +sqlparse==0.1.14 billiard==3.3.0.13 google-api-python-client==1.3.1 httplib2==0.9 @@ -39,6 +39,8 @@ simplejson==3.6.5 uritemplate==0.6 mock==1.0.1 +lxml==3.4.2 +testfixtures==4.1.2 # # These packages I punted on and hacked into my virtualenv by # symlinking to the global site-packages:
--- a/sg101/apache/sg101.wsgi Wed May 13 20:27:17 2015 -0500 +++ b/sg101/apache/sg101.wsgi Wed May 13 20:29:23 2015 -0500 @@ -35,8 +35,8 @@ if not OFFLINE: os.environ['DJANGO_SETTINGS_MODULE'] = 'sg101.settings.production' - import django.core.handlers.wsgi - application = django.core.handlers.wsgi.WSGIHandler() + from django.core.wsgi import get_wsgi_application + application = get_wsgi_application() else: application = offline_handler
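The direct WSGIHandler() instantiation predates Django's wsgi helper; get_wsgi_application() returns the same handler and, on Django 1.7, also runs django.setup(), so the app registry and the new AppConfig.ready() hooks are initialized before the first request. For reference, a minimal entry point using the replacement API looks like:

    import os
    from django.core.wsgi import get_wsgi_application

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sg101.settings.production')
    application = get_wsgi_application()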
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sg101/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,8 @@ +from django.contrib.messages.apps import MessagesConfig + + +# The Django messages app conflicts with our own messages app. +# Override the label to fix this. + +class DjangoMessagesConfig(MessagesConfig): + label = 'django_messages'
--- a/sg101/settings/base.py Wed May 13 20:27:17 2015 -0500 +++ b/sg101/settings/base.py Wed May 13 20:29:23 2015 -0500 @@ -103,7 +103,7 @@ 'django.contrib.contenttypes', 'django.contrib.flatpages', 'django.contrib.humanize', - 'django.contrib.messages', + 'sg101.apps.DjangoMessagesConfig', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.staticfiles', @@ -190,8 +190,6 @@ ####################################################################### # Haystack Search Settings ####################################################################### -HAYSTACK_SIGNAL_PROCESSOR = 'custom_search.signals.QueuedSignalProcessor' - HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'xapian_backend.XapianEngine', @@ -350,3 +348,6 @@ ], } + +# Turn off warning about test runner behavior change +SILENCED_SYSTEM_CHECKS = ['1_6.W001']
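The settings change above works together with sg101/apps.py: relabeling django.contrib.messages as 'django_messages' lets it coexist with the site's own messages app (configured in messages/apps.py earlier in this changeset), since Django 1.7 requires application labels to be unique. Roughly, the relevant INSTALLED_APPS entries end up as the sketch below, with unrelated entries elided:

    INSTALLED_APPS = [
        # ...
        'sg101.apps.DjangoMessagesConfig',   # django.contrib.messages, relabeled 'django_messages'
        # ...
        'messages',                          # the site's private-messages app, label 'messages'
        # ...
    ]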
--- a/sg101/settings/local.py Wed May 13 20:27:17 2015 -0500 +++ b/sg101/settings/local.py Wed May 13 20:29:23 2015 -0500 @@ -32,7 +32,8 @@ 'debug_toolbar.middleware.DebugToolbarMiddleware') INSTALLED_APPS.append('debug_toolbar') DEBUG_TOOLBAR_CONFIG = { - 'INTERCEPT_REDIRECTS': True, + 'DISABLE_PANELS': set(), + 'JQUERY_URL': '', } # Logging configuration
--- a/sg101/templates/core/admin_dashboard.html Wed May 13 20:27:17 2015 -0500 +++ b/sg101/templates/core/admin_dashboard.html Wed May 13 20:29:23 2015 -0500 @@ -1,5 +1,6 @@ {% if user.is_staff %} {% if flagged_posts or flagged_comments or flagged_profiles or event_requests or new_stories or new_downloads or new_links or flagged_shouts or broken_links or flagged_msgs or new_bands %} +<div class="clear"> <ul id="dashboard-list"> {% if flagged_posts %} <li><a href="/admin/forums/flaggedpost/">Posts</a>: {{ flagged_posts }}</li> @@ -35,5 +36,6 @@ <li><a href="/admin/bandmap/bandentry/?is_approved__exact=0">Band Map</a>: {{ new_bands }}</li> {% endif %} </ul> +</div> {% endif %} {% endif %}
--- a/sg101/urls.py Wed May 13 20:27:17 2015 -0500 +++ b/sg101/urls.py Wed May 13 20:29:23 2015 -0500 @@ -15,8 +15,6 @@ from core.views import FixedView -admin.autodiscover() - urlpatterns = patterns('', url(r'^$', TemplateView.as_view(template_name='home.html'),
--- a/shoutbox/models.py Wed May 13 20:27:17 2015 -0500 +++ b/shoutbox/models.py Wed May 13 20:29:23 2015 -0500 @@ -7,7 +7,7 @@ from django.contrib.auth.models import User from django.utils.html import escape, urlize -from smiley import smilify_html +from smiley.utils import smilify_html class Shout(models.Model):
--- a/smiley/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/smiley/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,70 +0,0 @@ -""" -Smiley classes and functions. - -""" -from django.utils.safestring import SafeData -from django.utils.html import conditional_escape - -from smiley.models import Smiley - - -class SmilifyHtml(object): - """ - A class to "smilify" text by replacing text with HTML img tags for smiley - images. - """ - def __init__(self): - self.map = Smiley.objects.get_smiley_map() - - def convert(self, value, autoescape=False): - """ - Converts and returns the supplied text with the HTML version of the - smileys. - """ - if not value: - return u'' - - if not autoescape or isinstance(value, SafeData): - esc = lambda x: x - else: - esc = conditional_escape - - words = value.split() - for i, word in enumerate(words): - if word in self.map: - words[i] = self.map[word] - else: - words[i] = esc(words[i]) - return u' '.join(words) - - -class SmilifyMarkdown(object): - """ - A class to "smilify" text by replacing text with Markdown image syntax for - smiley images. - """ - def __init__(self, relative_urls=True): - self.regexes = Smiley.objects.get_smiley_regexes( - relative_urls=relative_urls) - - def convert(self, s): - """ - Returns a string copy of the input s that has the smiley codes replaced - with Markdown for smiley images. - """ - if not s: - return u'' - - for regex, repl in self.regexes: - s = regex.sub(repl, s) - return s - - -def smilify_html(value, autoescape=False): - """ - A convenience function to "smilify" text by replacing text with HTML - img tags of smilies. - """ - s = SmilifyHtml() - return s.convert(value, autoescape=autoescape) -
--- a/smiley/models.py Wed May 13 20:27:17 2015 -0500 +++ b/smiley/models.py Wed May 13 20:29:23 2015 -0500 @@ -64,7 +64,7 @@ image = models.ImageField(upload_to='smiley/images/') title = models.CharField(max_length=32) code = models.CharField(max_length=32) - is_extra = models.BooleanField() + is_extra = models.BooleanField(default=False) objects = SmileyManager()
--- a/smiley/templatetags/smiley_tags.py Wed May 13 20:27:17 2015 -0500 +++ b/smiley/templatetags/smiley_tags.py Wed May 13 20:29:23 2015 -0500 @@ -1,11 +1,12 @@ """ -Template tags for the smiley application. +Template tags for the smiley application. """ from django import template from django.template.defaultfilters import stringfilter from django.utils.safestring import mark_safe from smiley.models import Smiley +from smiley.utils import smilify_html register = template.Library() @@ -14,7 +15,6 @@ @stringfilter def smiley_html(value, autoescape=False): """A filter to "smilify" text by replacing text with HTML img tags of smilies.""" - from smiley import smilify_html return mark_safe(smilify_html(value, autoescape=autoescape)) smiley_html.needs_autoescape = True
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/smiley/utils.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,69 @@ +""" +Smiley classes and functions. + +""" +from django.utils.safestring import SafeData +from django.utils.html import conditional_escape + +from models import Smiley + + +class SmilifyHtml(object): + """ + A class to "smilify" text by replacing text with HTML img tags for smiley + images. + """ + def __init__(self): + self.map = Smiley.objects.get_smiley_map() + + def convert(self, value, autoescape=False): + """ + Converts and returns the supplied text with the HTML version of the + smileys. + """ + if not value: + return u'' + + if not autoescape or isinstance(value, SafeData): + esc = lambda x: x + else: + esc = conditional_escape + + words = value.split() + for i, word in enumerate(words): + if word in self.map: + words[i] = self.map[word] + else: + words[i] = esc(words[i]) + return u' '.join(words) + + +class SmilifyMarkdown(object): + """ + A class to "smilify" text by replacing text with Markdown image syntax for + smiley images. + """ + def __init__(self, relative_urls=True): + self.regexes = Smiley.objects.get_smiley_regexes( + relative_urls=relative_urls) + + def convert(self, s): + """ + Returns a string copy of the input s that has the smiley codes replaced + with Markdown for smiley images. + """ + if not s: + return u'' + + for regex, repl in self.regexes: + s = regex.sub(repl, s) + return s + + +def smilify_html(value, autoescape=False): + """ + A convenience function to "smilify" text by replacing text with HTML + img tags of smilies. + """ + s = SmilifyHtml() + return s.convert(value, autoescape=autoescape)
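Relocating these helpers out of smiley/__init__.py is presumably driven by the 1.7 upgrade: the old package-level module pulled in smiley.models as soon as the package was imported, which the new app-loading machinery does not allow before the registry is ready. Callers now import from smiley.utils instead; a usage sketch, assuming a smiley code such as ':)' exists in the Smiley table:

    from smiley.utils import SmilifyMarkdown, smilify_html

    html = smilify_html(u'That set was great :)', autoescape=True)
    markdown = SmilifyMarkdown(relative_urls=False).convert(u'That set was great :)')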
--- a/weblinks/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/weblinks/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -1,1 +1,1 @@ -import signals +default_app_config = 'weblinks.apps.WeblinksConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/weblinks/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,9 @@ +from django.apps import AppConfig + + +class WeblinksConfig(AppConfig): + name = 'weblinks' + verbose_name = 'Web Links' + + def ready(self): + import weblinks.receivers
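The wiring pattern here is the one used throughout this changeset: __init__.py names a default_app_config, and the AppConfig's ready() hook imports the receivers module, whose module-level connect() calls register the handlers (see weblinks/receivers.py just below). An equivalent alternative sketch, not what the changeset uses, keeps the registration next to the handler with the @receiver decorator, so ready() only needs the import:

    from django.db.models.signals import post_save
    from django.dispatch import receiver

    from weblinks.models import Link


    @receiver(post_save, sender=Link, dispatch_uid='weblinks.receivers.on_link_save')
    def on_link_save(sender, instance, created, **kwargs):
        # Body as in the connect()-style handler below; registration happens
        # at import time via the decorator instead of an explicit connect().
        pass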
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/weblinks/receivers.py Wed May 13 20:29:23 2015 -0500
@@ -0,0 +1,43 @@
+"""Signal handlers for the weblinks application.
+
+We use signals to compute the denormalized category counts whenever a weblink
+is saved or deleted.
+"""
+from django.db.models.signals import post_save
+from django.db.models.signals import post_delete
+
+from weblinks.models import Category, Link
+
+
+def on_link_save(sender, **kwargs):
+    """This function updates the count field for all categories.
+    It is called whenever a link is saved via a signal.
+    """
+    if kwargs['created']:
+        # we only have to update the parent category
+        link = kwargs['instance']
+        cat = link.category
+        cat.count = Link.public_objects.filter(category=cat).count()
+        cat.save()
+    else:
+        # update all categories just to be safe (an existing link could
+        # have been moved from one category to another)
+        cats = Category.objects.all()
+        for cat in cats:
+            cat.count = Link.public_objects.filter(category=cat).count()
+            cat.save()
+
+
+def on_link_delete(sender, **kwargs):
+    """This function updates the count field for the link's parent
+    category. It is called when a link is deleted via a signal.
+    """
+    # update the parent category
+    link = kwargs['instance']
+    cat = link.category
+    cat.count = Link.public_objects.filter(category=cat).count()
+    cat.save()
+
+
+post_save.connect(on_link_save, sender=Link, dispatch_uid='weblinks.receivers')
+post_delete.connect(on_link_delete, sender=Link, dispatch_uid='weblinks.receivers')
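One caveat with this approach: post_save and post_delete only fire for individual save() and delete() calls, so bulk paths such as QuerySet.update() or bulk_create() leave the denormalized counts stale. A hypothetical maintenance helper, not part of this changeset, that rebuilds every count from scratch using the same query as the handlers:

    from weblinks.models import Category, Link


    def recount_link_categories():
        """Recompute the denormalized link count for every category."""
        for cat in Category.objects.all():
            cat.count = Link.public_objects.filter(category=cat).count()
            cat.save()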
--- a/weblinks/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/weblinks/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -1,11 +1,12 @@ """Haystack search index for the weblinks application.""" from haystack import indexes +from custom_search.fields import MaxTermSizeCharField from weblinks.models import Link class LinkIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) author = indexes.CharField(model_attr='user') pub_date = indexes.DateTimeField(model_attr='date_added')
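custom_search.fields.MaxTermSizeCharField itself is not part of this changeset; the switch here (and in the ygroup index at the end of the changeset) suggests a field that drops oversized terms before they reach Xapian, which rejects very long terms and can abort indexing on them. A rough sketch of what such a field might look like, where the limit and the character-based measurement are both assumptions:

    from haystack import indexes


    class MaxTermSizeCharField(indexes.CharField):
        """Sketch: skip words longer than a maximum term size when indexing."""

        MAX_TERM_SIZE = 64  # assumed limit, in characters

        def prepare(self, obj):
            text = super(MaxTermSizeCharField, self).prepare(obj)
            if not text:
                return text
            terms = [t for t in text.split() if len(t) <= self.MAX_TERM_SIZE]
            return u' '.join(terms)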
--- a/weblinks/signals.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,41 +0,0 @@ -"""Signals for the weblinks application. -We use signals to compute the denormalized category counts whenever a weblink -is saved.""" -from django.db.models.signals import post_save -from django.db.models.signals import post_delete - -from weblinks.models import Category, Link - - -def on_link_save(sender, **kwargs): - """This function updates the count field for all categories. - It is called whenever a link is saved via a signal. - """ - if kwargs['created']: - # we only have to update the parent category - link = kwargs['instance'] - cat = link.category - cat.count = Link.public_objects.filter(category=cat).count() - cat.save() - else: - # update all categories just to be safe (an existing link could - # have been moved from one category to another - cats = Category.objects.all() - for cat in cats: - cat.count = Link.public_objects.filter(category=cat).count() - cat.save() - - -def on_link_delete(sender, **kwargs): - """This function updates the count field for the link's parent - category. It is called when a link is deleted via a signal. - """ - # update the parent category - link = kwargs['instance'] - cat = link.category - cat.count = Link.public_objects.filter(category=cat).count() - cat.save() - - -post_save.connect(on_link_save, sender=Link, dispatch_uid='weblinks.signals') -post_delete.connect(on_link_delete, sender=Link, dispatch_uid='weblinks.signals')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/weblinks/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,51 @@ +"""Tests for the weblink app signal handlers.""" + +from django.contrib.auth.models import User +from django.test import TestCase + +import custom_search.receivers + +from weblinks.models import Category +from weblinks.models import Link + + +class ReceiverTestCase(TestCase): + + fixtures = ['weblinks_categories.json'] + + def setUp(self): + self.user = User.objects.create_user('user', 'user@example.com', 'pw') + + # Don't let our custom search signal handler class catch any of the + # signals we are throwing here. + custom_search.receivers.signal_processor.teardown() + + def tearDown(self): + custom_search.receivers.signal_processor.setup() + + def test_signal_handlers(self): + + category = Category.objects.get(pk=1) + link = Link(category=category, + title='Title', + url='http://example.com/', + description='Cool stuff', + is_public=True, + user=self.user) + link.save() + + category = Category.objects.get(pk=1) + self.assertEqual(1, category.count) + + category2 = Category.objects.get(pk=4) + link.category = category2 + link.save() + + category = Category.objects.get(pk=1) + self.assertEqual(0, category.count) + category2 = Category.objects.get(pk=4) + self.assertEqual(1, category2.count) + + link.delete() + category2 = Category.objects.get(pk=4) + self.assertEqual(0, category2.count)
--- a/wiki/__init__.py Wed May 13 20:27:17 2015 -0500 +++ b/wiki/__init__.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,1 @@ +default_app_config = 'wiki.apps.WikiConfig'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/wiki/apps.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,8 @@ +from django.apps import AppConfig + + +class WikiConfig(AppConfig): + name = 'wiki' + + def ready(self): + import wiki.receivers
--- a/wiki/models.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ -"""The wiki application integrates an external Wiki app with our Django -application. - -The wiki application has no models. It consists of some signals and -middleware only. - -""" -import wiki.signals
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/wiki/receivers.py Wed May 13 20:29:23 2015 -0500
@@ -0,0 +1,46 @@
+"""Signal handlers for wiki integration.
+
+We are interested in hearing about users logging in and out, so we can create
+and destroy an external cookie to allow access to the wiki.
+
+"""
+import logging
+
+from django.contrib.auth.signals import user_logged_in, user_logged_out
+
+from wiki.constants import SESSION_SET_MEMBER
+
+logger = logging.getLogger(__name__)
+
+
+def login_callback(sender, request, user, **kwargs):
+    """Signal callback function for a user logging in.
+
+    Sets a flag for the middleware to create an external cookie.
+
+    """
+    logger.info('User login: %s', user.username)
+
+    request.wiki_set_cookie = True
+
+
+def logout_callback(sender, request, user, **kwargs):
+    """Signal callback function for a user logging out.
+
+    Sets a flag for the middleware to delete the external cookie.
+
+    Since the user is about to log out, her session will be wiped out after
+    this function returns. This forces us to set an attribute on the request
+    object so that the response middleware can delete the wiki's cookie.
+
+    """
+    if user:
+        logger.info('User logout: %s', user.username)
+
+    # Remember what Redis set member to delete by adding an attribute to the
+    # request object:
+    request.wiki_delete_cookie = request.session.get(SESSION_SET_MEMBER)
+
+
+user_logged_in.connect(login_callback, dispatch_uid='wiki.receivers.login')
+user_logged_out.connect(logout_callback, dispatch_uid='wiki.receivers.logout')
--- a/wiki/signals.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ -"""Signal handlers for wiki integration. - -We are interested in hearing about users logging in and out, so we can create -and destroy an external cookie to allow access to the wiki. - -""" -import logging - -from django.contrib.auth.signals import user_logged_in, user_logged_out - -from wiki.constants import SESSION_SET_MEMBER - -logger = logging.getLogger(__name__) - - -def login_callback(sender, request, user, **kwargs): - """Signal callback function for a user logging in. - - Sets a flag for the middleware to create an external cookie. - - """ - logger.info('User login: %s', user.username) - - request.wiki_set_cookie = True - - -def logout_callback(sender, request, user, **kwargs): - """Signal callback function for a user logging in. - - Sets a flag for the middleware to delete the external cookie. - - Since the user is about to logout, her session will be wiped out after - this function returns. This forces us to set an attribute on the request - object so that the response middleware can delete the wiki's cookie. - - """ - if user: - logger.info('User logout: %s', user.username) - - # Remember what Redis set member to delete by adding an attribute to the - # request object: - request.wiki_delete_cookie = request.session.get(SESSION_SET_MEMBER) - - -user_logged_in.connect(login_callback, dispatch_uid='wiki.signals.login') -user_logged_out.connect(logout_callback, dispatch_uid='wiki.signals.logout')
--- a/wiki/tests.py Wed May 13 20:27:17 2015 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ -""" -Tests for the wiki integration application. - -""" -import hashlib -import datetime - -from django.contrib.auth.models import User -from django.test import TestCase -from django.test.client import RequestFactory -from django.http import HttpResponse -from django.conf import settings - -from core.services import get_redis_connection -from wiki.middleware import WikiMiddleware -from wiki.constants import SESSION_SET_MEMBER - - -class MiddleWareTestCase(TestCase): - - def setUp(self): - self.factory = RequestFactory() - self.user = User.objects.create_user('test_user', 'test@example.com', - 'password') - self.conn = get_redis_connection() - self.mw = WikiMiddleware() - - def tearDown(self): - self.conn.delete(settings.WIKI_REDIS_SET) - - def create_request(self): - request = self.factory.get('/contact/') - request.session = {} - request.user = self.user - return request - - def test_middleware(self): - - request = self.create_request() - response = HttpResponse() - - request.wiki_set_cookie = True - response = self.mw.process_response(request, response) - - cookie = response.cookies.get(settings.WIKI_COOKIE_NAME) - cookie_val = '' - self.assertIsNotNone(cookie) - if cookie: - self.assertEqual(cookie['domain'], settings.WIKI_COOKIE_DOMAIN) - self.assertEqual(cookie['path'], '/') - self.assertEqual(cookie['max-age'], settings.WIKI_COOKIE_AGE) - - cookie_val = cookie.value - try: - user, email, key = cookie_val.split('#') - except ValueError: - self.fail('invalid cookie value') - else: - self.assertEqual(user, self.user.username) - self.assertEqual(email, self.user.email) - self.assertEqual(len(key), 64) - - self.assertEqual(self.conn.zcard(settings.WIKI_REDIS_SET), 1) - - h = hashlib.sha256() - h.update(cookie_val) - member = h.hexdigest() - - score = self.conn.zscore(settings.WIKI_REDIS_SET, member) - now = datetime.datetime.utcnow() - session_start = datetime.datetime.fromtimestamp(score) - self.assertLess(now - session_start, datetime.timedelta(seconds=2)) - - session_member = request.session.get(SESSION_SET_MEMBER) - self.assertTrue(session_member and session_member == member) - - # test the destroy session logic - - request = self.create_request() - request.wiki_delete_cookie = member - response = self.mw.process_response(request, response) - - cookie = response.cookies.get(settings.WIKI_COOKIE_NAME) - self.assertIsNotNone(cookie) - if cookie: - self.assertEqual(cookie.value, '') - self.assertEqual(cookie['domain'], settings.WIKI_COOKIE_DOMAIN) - self.assertEqual(cookie['path'], '/') - self.assertEqual(cookie['max-age'], 0) - self.assertEqual(cookie['expires'], 'Thu, 01-Jan-1970 00:00:00 GMT') - - self.assertEqual(self.conn.zcard(settings.WIKI_REDIS_SET), 0)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/wiki/tests/test_middleware.py Wed May 13 20:29:23 2015 -0500 @@ -0,0 +1,92 @@ +""" +Tests for the wiki integration application. + +""" +import hashlib +import datetime + +from django.contrib.auth.models import User +from django.test import TestCase +from django.test.client import RequestFactory +from django.http import HttpResponse +from django.conf import settings + +from core.services import get_redis_connection +from wiki.middleware import WikiMiddleware +from wiki.constants import SESSION_SET_MEMBER + + +class MiddleWareTestCase(TestCase): + + def setUp(self): + self.factory = RequestFactory() + self.user = User.objects.create_user('test_user', 'test@example.com', + 'password') + self.conn = get_redis_connection() + self.mw = WikiMiddleware() + + def tearDown(self): + self.conn.delete(settings.WIKI_REDIS_SET) + + def create_request(self): + request = self.factory.get('/contact/') + request.session = {} + request.user = self.user + return request + + def test_middleware(self): + + request = self.create_request() + response = HttpResponse() + + request.wiki_set_cookie = True + response = self.mw.process_response(request, response) + + cookie = response.cookies.get(settings.WIKI_COOKIE_NAME) + cookie_val = '' + self.assertIsNotNone(cookie) + if cookie: + self.assertEqual(cookie['domain'], settings.WIKI_COOKIE_DOMAIN) + self.assertEqual(cookie['path'], '/') + self.assertEqual(cookie['max-age'], settings.WIKI_COOKIE_AGE) + + cookie_val = cookie.value + try: + user, email, key = cookie_val.split('#') + except ValueError: + self.fail('invalid cookie value') + else: + self.assertEqual(user, self.user.username) + self.assertEqual(email, self.user.email) + self.assertEqual(len(key), 64) + + self.assertEqual(self.conn.zcard(settings.WIKI_REDIS_SET), 1) + + h = hashlib.sha256() + h.update(cookie_val) + member = h.hexdigest() + + score = self.conn.zscore(settings.WIKI_REDIS_SET, member) + now = datetime.datetime.utcnow() + session_start = datetime.datetime.fromtimestamp(score) + self.assertLess(now - session_start, datetime.timedelta(seconds=2)) + + session_member = request.session.get(SESSION_SET_MEMBER) + self.assertTrue(session_member and session_member == member) + + # test the destroy session logic + + request = self.create_request() + request.wiki_delete_cookie = member + response = self.mw.process_response(request, response) + + cookie = response.cookies.get(settings.WIKI_COOKIE_NAME) + self.assertIsNotNone(cookie) + if cookie: + self.assertEqual(cookie.value, '') + self.assertEqual(cookie['domain'], settings.WIKI_COOKIE_DOMAIN) + self.assertEqual(cookie['path'], '/') + self.assertEqual(cookie['max-age'], 0) + self.assertEqual(cookie['expires'], 'Thu, 01-Jan-1970 00:00:00 GMT') + + self.assertEqual(self.conn.zcard(settings.WIKI_REDIS_SET), 0)
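The relocated middleware test documents the contract that the wiki receivers depend on: the set-cookie flag produces a username#email#key cookie plus a SHA-256 member in a Redis sorted set scored by session start time, and the delete flag expires the cookie and removes that member. For orientation only, a rough sketch of the shape of such a process_response; the real code lives in wiki/middleware.py, which this changeset does not touch, and the key scheme and timestamp convention here are assumptions:

    import hashlib
    import os
    import time

    from django.conf import settings

    from core.services import get_redis_connection
    from wiki.constants import SESSION_SET_MEMBER


    class WikiCookieMiddlewareSketch(object):

        def process_response(self, request, response):
            conn = get_redis_connection()

            if getattr(request, 'wiki_set_cookie', False):
                key = hashlib.sha256(os.urandom(32)).hexdigest()  # 64 hex chars (assumed scheme)
                value = '%s#%s#%s' % (request.user.username, request.user.email, key)
                response.set_cookie(settings.WIKI_COOKIE_NAME, value,
                                    max_age=settings.WIKI_COOKIE_AGE,
                                    domain=settings.WIKI_COOKIE_DOMAIN)

                # Track the wiki session in a Redis sorted set; the exact
                # timestamp convention of the real middleware is not shown here.
                member = hashlib.sha256(value).hexdigest()
                # kwargs form sidesteps the Redis/StrictRedis zadd argument-order
                # difference in redis-py 2.x.
                conn.zadd(settings.WIKI_REDIS_SET, **{member: time.time()})
                request.session[SESSION_SET_MEMBER] = member

            member = getattr(request, 'wiki_delete_cookie', None)
            if member:
                response.delete_cookie(settings.WIKI_COOKIE_NAME,
                                       domain=settings.WIKI_COOKIE_DOMAIN)
                conn.zrem(settings.WIKI_REDIS_SET, member)

            return response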
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/wiki/tests/test_receivers.py Wed May 13 20:29:23 2015 -0500
@@ -0,0 +1,32 @@
+"""Tests for the wiki app's signal handlers."""
+import logging
+
+from django.contrib.auth.models import User
+from django.test import TestCase
+
+from testfixtures import log_capture
+
+
+class ReceiverTestCase(TestCase):
+
+    def setUp(self):
+        self.user = User.objects.create_user('user', 'user@example.com', 'pw')
+
+        # Temporarily enable logging
+        self.old_disable = logging.getLogger().manager.disable
+        logging.disable(logging.NOTSET)
+
+    def tearDown(self):
+        logging.disable(self.old_disable)
+
+    @log_capture('wiki.receivers')
+    def test_signal_handlers(self, lc):
+        # We don't have access to the dummy request that the test client creates
+        # when logging in, so we can't really check whether we added attributes
+        # to the request object. But that code is pretty simple, so let's just
+        # test that we logged something, so we know our signal handlers are
+        # hooked up and running.
+        self.client.login(username='user', password='pw')
+        self.client.logout()
+        lc.check(('wiki.receivers', 'INFO', 'User login: user'),
+                 ('wiki.receivers', 'INFO', 'User logout: user'))
--- a/ygroup/search_indexes.py Wed May 13 20:27:17 2015 -0500 +++ b/ygroup/search_indexes.py Wed May 13 20:29:23 2015 -0500 @@ -4,11 +4,12 @@ """ from haystack import indexes +from custom_search.fields import MaxTermSizeCharField from ygroup.models import Post class PostIndex(indexes.SearchIndex, indexes.Indexable): - text = indexes.CharField(document=True, use_template=True) + text = MaxTermSizeCharField(document=True, use_template=True) pub_date = indexes.DateTimeField(model_attr='creation_date') def get_model(self):