changeset 750:aeafbf3ecebf

For #63, upgrade to celery 3.1.7.
author Brian Neal <bgneal@gmail.com>
date Tue, 31 Dec 2013 16:36:22 -0600
parents b6e98717690b
children 22fb12361fb3
files accounts/tasks.py core/tasks.py custom_search/tasks.py forums/tasks.py messages/tasks.py potd/tasks.py requirements_dev.txt sg101/__init__.py sg101/apache/sg101.wsgi sg101/celery.py sg101/settings/base.py wiki/tasks.py
diffstat 12 files changed, 70 insertions(+), 42 deletions(-)
--- a/accounts/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/accounts/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,12 +2,14 @@
 Celery tasks for the accounts application.
 
 """
-from celery.task import task
+from __future__ import absolute_import
+
+from celery import shared_task
 
 from accounts.stats import update_user_stats
 
 
-@task
+@shared_task
 def user_stats_task(user_id):
     """
     Run the update_user_stats() function on a new task.
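The @task → @shared_task swap above is repeated in each tasks.py that follows; call sites are unaffected, since a shared_task is still queued the same way under Celery 3.1. A minimal illustrative sketch (the call site and the user id are made up, not part of this changeset):

    # e.g. from a signal handler or view elsewhere in the accounts app
    from accounts.tasks import user_stats_task

    user_stats_task.delay(42)   # 42 is a hypothetical user id; runs on a worker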
--- a/core/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/core/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,22 +2,15 @@
 Celery tasks for the core application.
 
 """
-from celery.task import task
+from __future__ import absolute_import
+
+from celery import shared_task
 import django.core.mail
 
 import core.whos_online
 
 
-@task
-def add(x, y):
-    """
-    It is useful to have a test task laying around. This is it.
-
-    """
-    return x + y
-
-
-@task
+@shared_task
 def send_mail(subject, message, from_email, recipient_list, **kwargs):
     """
     A task to send mail via Django.
@@ -27,7 +20,7 @@
             **kwargs)
 
 
-@task
+@shared_task
 def cleanup():
     """
     A task to perform site-wide cleanup actions.
@@ -51,7 +44,7 @@
     command.execute()
 
 
-@task
+@shared_task
 def max_users():
     """
     Run the periodic task to calculate the who's online max users/visitors
--- a/custom_search/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/custom_search/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,13 +2,15 @@
 Tasks for our custom search application.
 
 """
+from __future__ import absolute_import
+
+from celery import shared_task
 from django.conf import settings
-from celery.task import task
 
 from queued_search.management.commands.process_search_queue import Command
 
 
-@task
+@shared_task
 def process_search_queue_task():
     """
     Celery task to run the queued_search application's process_search_queue
--- a/forums/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/forums/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,12 +2,14 @@
 Celery tasks for the forums application.
 
 """
-from celery.task import task
+from __future__ import absolute_import
+
+from celery import shared_task
 
 import forums.latest
 
 
-@task
+@shared_task
 def new_post_task(post_id):
     """
     This task performs new post processing on a Celery task.
@@ -16,7 +18,7 @@
     forums.latest.process_new_post(post_id)
 
 
-@task
+@shared_task
 def updated_post_task(post_id):
     """
     This task performs updated post processing on a Celery task.
@@ -25,7 +27,7 @@
     forums.latest.process_updated_post(post_id)
 
 
-@task
+@shared_task
 def new_topic_task(topic_id):
     """
     This task performs new topic processing on a Celery task.
@@ -34,7 +36,7 @@
     forums.latest.process_new_topic(topic_id)
 
 
-@task
+@shared_task
 def updated_topic_task(topic_id):
     """
     This task performs updated topic processing on a Celery task.
--- a/messages/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/messages/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,10 +2,12 @@
 Celery tasks for the messages application.
 
 """
-from celery.task import task
+from __future__ import absolute_import
 
+from celery import shared_task
 
-@task
+
+@shared_task
 def purge_messages():
     """
     Task to purge messages that have been deleted by both sender & receiver.
--- a/potd/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/potd/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,11 +2,13 @@
 Celery tasks for the POTD app.
 
 """
-from celery.task import task
+from __future__ import absolute_import
+
+from celery import shared_task
 
 import potd.tools
 
 
-@task
+@shared_task
 def pick_potd():
     potd.tools.pick_potd()
--- a/requirements_dev.txt	Mon Dec 30 15:05:43 2013 -0600
+++ b/requirements_dev.txt	Tue Dec 31 16:36:22 2013 -0600
@@ -14,16 +14,15 @@
 repoze.timeago==0.5
 xapian-haystack==1.1.5beta
 anyjson==0.3.3
-celery==2.4.5
-django-celery==2.4.2
+celery==3.1.7
 django-picklefield==0.3.1
-kombu==2.5.10
+kombu==3.0.8
 python-dateutil==1.5
 python-memcached==1.48
 wsgiref==0.1.2
 python-ts3==0.1
 docutils==0.10
-amqp==1.0.11
+amqp==1.3.3
 Fabric==1.4.1
 argparse==1.2.1
 six==1.4.1
@@ -34,6 +33,7 @@
 Pillow==2.1.0
 boto==2.13.0
 sqlparse==0.1.10
+billiard==3.3.0.13
 #
 # These packages I punted on and hacked into my virtualenv by
 # symlinking to the global site-packages:
--- a/sg101/__init__.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/sg101/__init__.py	Tue Dec 31 16:36:22 2013 -0600
@@ -0,0 +1,4 @@
+# Celery integration: these lines ensure the celery app is always imported when
+# Django starts so that shared_task can find the app.
+from __future__ import absolute_import
+from .celery import app as celery_app
--- a/sg101/apache/sg101.wsgi	Mon Dec 30 15:05:43 2013 -0600
+++ b/sg101/apache/sg101.wsgi	Tue Dec 31 16:36:22 2013 -0600
@@ -11,7 +11,6 @@
 sys.path.append('/svr/django-sites/sg101/sg101/tools')
 
 os.environ['PYTHON_EGG_CACHE'] = '/svr/django-sites/sg101/eggs'
-os.environ['CELERY_LOADER'] = 'django'
 
 
 def offline_handler(environ, start_response):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sg101/celery.py	Tue Dec 31 16:36:22 2013 -0600
@@ -0,0 +1,26 @@
+"""Django / celery integration module."""
+
+from __future__ import absolute_import
+import os
+import platform
+
+from celery import Celery
+from django.conf import settings
+
+
+role = 'production' if platform.node() == 'jaguar' else 'local'
+settings_val = 'sg101.settings.{}'.format(role)
+
+# set the default Django settings module for the 'celery' program.
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', settings_val)
+
+app = Celery('sg101')
+
+app.config_from_object('django.conf:settings')
+app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
+
+# It's useful to have a debug task lying around:
+
+@app.task(bind=True)
+def debug_task(self):
+    print('Request: {0!r}'.format(self.request))
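With django-celery removed, worker and beat processes point at this app module directly instead of the old Django loader, and autodiscover_tasks() above is what lets the per-app tasks.py modules keep using @shared_task without importing the app themselves. A hedged sketch of typical usage (command flags and the task arguments are illustrative, standard Celery 3.1 invocation):

    #   celery -A sg101 worker -l info
    #   celery -A sg101 beat
    #
    # Inside Django code, the re-export added to sg101/__init__.py makes the
    # app importable before any task is sent:
    from sg101 import celery_app

    # equivalent to calling user_stats_task.delay(42); the id is hypothetical
    celery_app.send_task('accounts.tasks.user_stats_task', args=[42])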
--- a/sg101/settings/base.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/sg101/settings/base.py	Tue Dec 31 16:36:22 2013 -0600
@@ -5,7 +5,6 @@
 from decimal import Decimal
 
 from django.contrib.messages import constants as message_constants
-import djcelery
 from celery.schedules import crontab
 
 
@@ -110,7 +109,6 @@
     'django.contrib.sessions',
     'django.contrib.sites',
     'django.contrib.staticfiles',
-    'djcelery',
     'elsewhere',
     'haystack',
     'queued_search',
@@ -212,17 +210,13 @@
 BROKER_URL = 'redis://localhost:6379/1'
 BROKER_POOL_LIMIT = 10
 
+CELERY_TIMEZONE = TIME_ZONE
+CELERY_TASK_SERIALIZER = 'json'
+CELERY_ACCEPT_CONTENT = ['json']
 CELERY_IGNORE_RESULT = True
-CELERY_RESULT_BACKEND = 'redis'
-CELERY_REDIS_HOST = 'localhost'
-CELERY_REDIS_PORT = 6379
-CELERY_REDIS_DB = 1
-
 CELERY_DISABLE_RATE_LIMITS = True
 CELERY_SEND_TASK_ERROR_EMAILS = True
 
-djcelery.setup_loader()
-
 CELERYBEAT_SCHEDULE = {
     "potd": {
         "task": "potd.tasks.pick_potd",
--- a/wiki/tasks.py	Mon Dec 30 15:05:43 2013 -0600
+++ b/wiki/tasks.py	Tue Dec 31 16:36:22 2013 -0600
@@ -2,11 +2,13 @@
 Celery tasks for the wiki app.
 
 """
+from __future__ import absolute_import
+
 import datetime
 import logging
 import time
 
-from celery.task import task
+from celery import shared_task
 from django.conf import settings
 import redis
 
@@ -16,7 +18,7 @@
 logger = logging.getLogger(__name__)
 
 
-@task
+@shared_task
 def expire_cookies():
     """
     Periodically run this task to remove expired cookies from the Redis set