annotate forums/latest.py @ 1177:e9f6a2c5c1de

Daylight savings time fix
author Brian Neal <bgneal@gmail.com>
date Sun, 10 Mar 2019 14:03:47 -0500
parents 90e8cc6eff77
children
rev   line source
bgneal@509 1 """
bgneal@509 2 This module maintains the latest posts datastore. The latest posts are often
bgneal@509 3 needed by RSS feeds, "latest posts" template tags, etc. This module listens for
bgneal@509 4 the post_content_update signal, then bundles the post up and stores it by forum
bgneal@509 5 ID in Redis. We also maintain a combined forums list. This allows quick
bgneal@509 6 retrieval of the latest posts and avoids some slow SQL queries.
bgneal@509 7
bgneal@522 8 We also do things like send topic notification emails, auto-favorite, and
bgneal@522 9 auto-subscribe functions here rather than bog the user down in the request /
bgneal@522 10 response cycle.
bgneal@522 11
bgneal@509 12 """
bgneal@595 13 # Maintenance notes:
bgneal@595 14 # How we use Redis in this module:
bgneal@595 15 #
bgneal@595 16 # Forum post processing:
bgneal@595 17 #
bgneal@595 18 # * Forum posts are turned into Python dictionaries, then converted to JSON and
bgneal@595 19 # stored under keys: forums:post:id
bgneal@595 20 # * Each forum has a list in Redis stored under the key: forums:rss:id. This
bgneal@595 21 # is a list of post IDs.
bgneal@595 22 # * There is also a key called forums:rss:* which is the combined latest
bgneal@595 23 # feed. It is also a list of post IDs.
bgneal@595 24 # * A sorted set is maintained that keeps track of the reference count for each
bgneal@595 25 # post. When a new post is created, this reference count is 2 because it is
bgneal@595 26 # stored in both the combined list and the parent forum list.
bgneal@595 27 # This sorted set is stored under the key: forums:post_ref_cnt.
bgneal@595 28 # * When a post falls off a list due to aging, the reference count in the
bgneal@595 29 # ordered set is decremented. If it falls to zero, the post's key is deleted
bgneal@595 30 # from Redis.
bgneal@595 31 # * When a post is edited, and it is in Redis, we simply update the JSON
bgneal@595 32 # content.
bgneal@595 33 # * When a post is deleted, and it is in Redis, it is removed from the 2 lists,
bgneal@595 34 # the ordered set, and deleted from Redis.
bgneal@595 35 # * When the RSS feed wants to update, it simply pulls down the entire list of
bgneal@595 36 # post IDs for the feed of interest, then does a get on all the posts.
bgneal@595 37 #
bgneal@595 38 # Topics with recent posts processing:
bgneal@595 39 #
bgneal@595 40 # * A key is created for each topic that is updated.
bgneal@595 41 # * An ordered set of topics is maintained with the current time as the score.
bgneal@595 42 # * An updated topic gets its score bumped.
bgneal@595 43 # * We only allow MAX_UPDATED_TOPICS number of topics in the set. We sort the
bgneal@595 44 # set by score, and the expired topics are removed from the set and their keys
bgneal@595 45 # are deleted from Redis.
bgneal@595 46 # * The template tag (or anyone) who wants the list of topics with new posts
bgneal@595 47 # gets the list of IDs sorted by score from newest to oldest. An mget is then
bgneal@595 48 # performed to get all the topic data and it is deserialized from JSON.
bgneal@595 49 #
bgneal@595 50 # We also maintain topic and post counts in Redis since select(*) can take a
bgneal@595 51 # while with MySQL InnoDb.
bgneal@595 52 #
bgneal@509 53 import datetime
bgneal@679 54 import json
bgneal@522 55 import logging
bgneal@509 56 import time
bgneal@509 57
bgneal@1177 58 import dateutil.parser
bgneal@1168 59 from django.conf import settings
bgneal@509 60 from django.dispatch import receiver
bgneal@594 61 from django.template.loader import render_to_string
bgneal@1177 62 from django.utils.timezone import get_default_timezone, make_aware
bgneal@1168 63 import pytz
bgneal@523 64 import redis
bgneal@509 65
bgneal@522 66 from forums.signals import post_content_update, topic_content_update
bgneal@594 67 from forums.models import Forum, Topic, Post, Attachment
bgneal@522 68 from forums.views.subscriptions import notify_topic_subscribers
bgneal@522 69 from forums.tools import auto_favorite, auto_subscribe
bgneal@509 70 from core.services import get_redis_connection
bgneal@792 71 from core.markup import site_markup
bgneal@509 72
# This constant controls how many latest posts per forum we store
MAX_POSTS = 50

# This controls how many updated topics we track
MAX_UPDATED_TOPICS = 50

# Server-local timezone; used when fixing up legacy integer timestamps
SERVER_TZ = pytz.timezone(settings.TIME_ZONE)

# Redis key names:
POST_COUNT_KEY = "forums:public_post_count"      # cached count of public posts
TOPIC_COUNT_KEY = "forums:public_topic_count"    # cached count of public topics
UPDATED_TOPICS_SET_KEY = "forums:updated_topics:set"  # sorted set: topic id -> timestamp score
UPDATED_TOPIC_KEY = "forums:updated_topics:%s"   # JSON blob for one updated topic (by id)
POST_KEY = "forums:post:%s"                      # JSON blob for one post (by id)
FORUM_RSS_KEY = "forums:rss:%s"                  # list of post ids for one forum's feed
ALL_FORUMS_RSS_KEY = "forums:rss:*"              # list of post ids for the combined feed
POST_SET_KEY = "forums:post_ref_cnt"             # sorted set: post id -> reference count

logger = logging.getLogger(__name__)
bgneal@522 92
bgneal@509 93
@receiver(post_content_update, dispatch_uid='forums.latest_posts')
def on_post_update(sender, **kwargs):
    """
    Signal handler invoked whenever a post is created or updated.

    All real work is deferred to a Celery task so that the
    request/response cycle stays fast.

    """
    # Pick the task matching the kind of change, then hand off the post id.
    if kwargs['created']:
        task = forums.tasks.new_post_task
    else:
        task = forums.tasks.updated_post_task
    task.delay(sender.id)
bgneal@522 108
bgneal@522 109
def process_new_post(post_id):
    """
    Perform all new-post processing. Runs on a Celery task.

    Updates the Redis feed/statistic caches (public forums only), then
    sends subscriber notifications and applies auto-favorite /
    auto-subscribe actions.

    """
    try:
        post = Post.objects.select_related().get(pk=post_id)
    except Post.DoesNotExist:
        logger.warning("process_new_post: post %d does not exist", post_id)
        return

    # Only posts made to public forums affect the cached feeds/stats
    public_forums = Forum.objects.public_forum_ids()
    if post.topic.forum.id in public_forums:
        conn = get_redis_connection()
        _update_post_feeds(conn, post)
        _update_post_count(conn, public_forums)
        _update_latest_topics(conn, post)

    # Email any topic subscribers right now (we're already off-request)
    notify_topic_subscribers(post, defer=False)

    # Apply auto-favorite and auto-subscribe actions for the new post
    auto_favorite(post)
    auto_subscribe(post)
bgneal@522 136
bgneal@522 137
def process_updated_post(post_id):
    """
    Perform all updated-post processing. Runs on a Celery task.

    If the post is currently cached for an RSS feed, refresh its
    serialized JSON in Redis; otherwise do nothing.

    """
    conn = get_redis_connection()
    post_key = POST_KEY % post_id

    # Only bother if this post is currently part of an RSS feed
    if conn.get(post_key) is None:
        return

    try:
        post = Post.objects.select_related().get(pk=post_id)
    except Post.DoesNotExist:
        logger.warning("process_updated_post: post %d does not exist", post_id)
        return

    # Re-serialize and overwrite the cached copy
    conn.set(post_key, _serialize_post(post))
bgneal@595 157
bgneal@595 158
def _update_post_feeds(conn, post):
    """
    Updates the forum feeds we keep in Redis so that our RSS feeds are quick.

    conn is a Redis connection; post is a Post model instance.

    Stores the serialized post under its own key, pushes its id onto the
    per-forum and combined feed lists, and maintains a reference count of
    2 (one per list) in the POST_SET_KEY sorted set. Lists are then
    truncated to MAX_POSTS; posts whose reference count drops to zero are
    deleted from Redis entirely.

    """
    post_key = POST_KEY % post.id
    post_value = _serialize_post(post)

    pipeline = conn.pipeline()

    # Store serialized post content under its own key
    pipeline.set(post_key, post_value)

    # Store in the RSS feed for the post's forum
    forum_key = FORUM_RSS_KEY % post.topic.forum.id
    pipeline.lpush(forum_key, post.id)

    # Store in the RSS feed for combined forums
    pipeline.lpush(ALL_FORUMS_RSS_KEY, post.id)

    # Store reference count for the post
    # NOTE(review): (key, score, member) / (key, member, amount) argument
    # order here matches the legacy redis-py 2.x zadd/zincrby signatures;
    # redis-py 3.0 changed both — confirm the pinned client version.
    pipeline.zadd(POST_SET_KEY, 2, post.id)

    results = pipeline.execute()

    # Make sure our forums RSS lists lengths are not exceeded.
    # results[1] / results[2] are the post-push lengths of the forum list
    # and the combined list respectively (lpush returns the new length).

    if results[1] > MAX_POSTS or results[2] > MAX_POSTS:
        pipeline = conn.pipeline()

        # Truncate lists of posts by popping the oldest id off the tail:
        if results[1] > MAX_POSTS:
            pipeline.rpop(forum_key)
        if results[2] > MAX_POSTS:
            pipeline.rpop(ALL_FORUMS_RSS_KEY)
        post_ids = pipeline.execute()

        # Decrement reference count(s) for each evicted post id
        pipeline = conn.pipeline()
        for post_id in post_ids:
            pipeline.zincrby(POST_SET_KEY, post_id, -1)
        scores = pipeline.execute()

        # If any reference counts have fallen to 0, clean up:
        if not all(scores):
            pipeline = conn.pipeline()

            # remove from post set
            ids = [post_ids[n] for n, s in enumerate(scores) if s <= 0.0]
            pipeline.zrem(POST_SET_KEY, *ids)

            # remove serialized post data
            keys = [POST_KEY % n for n in ids]
            pipeline.delete(*keys)

            pipeline.execute()
bgneal@509 215
bgneal@509 216
def _update_post_count(conn, public_forums):
    """
    Increment the cached public post count in Redis.

    Doing a COUNT(*) on the post table can be expensive in MySQL InnoDB,
    so we keep the statistic ourselves. When the key is missing (incr
    returns 1), Redis probably got trashed and we recompute the true
    value from the database.

    """
    if conn.incr(POST_COUNT_KEY) == 1:
        # Key didn't exist before the incr; rebuild the real count
        count = Post.objects.filter(topic__forum__in=public_forums).count()
        conn.set(POST_COUNT_KEY, count)
bgneal@522 229
bgneal@522 230
def _update_latest_topics(conn, post):
    """
    Update the "latest topics with new posts" cache in Redis.

    A template tag and a forum view read this information. The topic's
    score in the sorted set is the post creation time as a unix
    timestamp; only MAX_UPDATED_TOPICS entries are kept.

    """
    # Serialize the topic attributes we care about
    topic = post.topic
    topic_score = int(time.mktime(post.creation_date.timetuple()))

    topic_json = json.dumps({
        'title': topic.name,
        'author': post.user.username,
        'date': topic_score,
        'url': topic.get_latest_post_url()
    })
    key = UPDATED_TOPIC_KEY % topic.id

    pipeline = conn.pipeline()
    pipeline.set(key, topic_json)
    pipeline.zadd(UPDATED_TOPICS_SET_KEY, topic_score, topic.id)
    pipeline.zcard(UPDATED_TOPICS_SET_KEY)
    results = pipeline.execute()

    # Trim the set back down to MAX_UPDATED_TOPICS entries
    num_to_del = results[-1] - MAX_UPDATED_TOPICS
    if num_to_del > 0:
        # Fetch the ids of the oldest (lowest-scored) topics first
        start, stop = 0, num_to_del - 1     # Redis indices are inclusive
        old_ids = conn.zrange(UPDATED_TOPICS_SET_KEY, start, stop)

        # Delete their per-topic JSON keys
        conn.delete(*[UPDATED_TOPIC_KEY % n for n in old_ids])

        # Then drop the oldest num_to_del entries from the sorted set
        conn.zremrangebyrank(UPDATED_TOPICS_SET_KEY, start, stop)
bgneal@522 270
bgneal@522 271
def get_latest_posts(num_posts=MAX_POSTS, forum_id=None):
    """
    Retrieve up to num_posts latest posts for the forum with the given
    forum_id, or from the combined forums datastore when forum_id is
    None. Returns a list of dictionaries, one per post.

    """
    key = FORUM_RSS_KEY % forum_id if forum_id else ALL_FORUMS_RSS_KEY

    # Clamp the requested count to [0, MAX_POSTS]
    num_posts = max(0, min(MAX_POSTS, num_posts))
    if num_posts == 0:
        return []

    conn = get_redis_connection()
    post_ids = conn.lrange(key, 0, num_posts - 1)
    if not post_ids:
        return []

    # Bulk-fetch the serialized posts; some may have been deleted
    raw_posts = conn.mget([POST_KEY % n for n in post_ids])

    posts = []
    for raw in raw_posts:
        if raw is None:
            continue
        post = json.loads(raw)

        # fix up the pubdate; turn it back into a datetime object
        post['pubdate'] = _deserialize_date(post['pubdate'])

        posts.append(post)

    return posts
bgneal@522 306
bgneal@522 307
def _deserialize_date(pubdate):
    """
    Convert a serialized pubdate value back into a datetime object.

    Current data stores pubdate as an ISO-8601 string (see
    _serialize_post); legacy data stored it as a unix timestamp
    (int/long). Returns a datetime in both cases.

    For legacy timestamps we must watch out for DST "spring forward"
    gaps: if the resulting wall-clock time does not exist in the default
    timezone, nudge it forward one hour.

    """
    if isinstance(pubdate, (int, long)):
        # legacy data, fix up and watch out for timezone glitches.
        # (A previous revision called new_date.replace(tzinfo=SERVER_TZ)
        # here without using the result; datetime.replace returns a new
        # object, so that call was a no-op and has been removed.)
        new_date = datetime.datetime.utcfromtimestamp(pubdate)

        # make_aware() raises NonExistentTimeError for times inside a
        # DST transition gap; we use it only as a probe and return a
        # naive datetime either way.
        try:
            make_aware(new_date, get_default_timezone())
        except pytz.NonExistentTimeError:
            new_date += datetime.timedelta(hours=1)
        return new_date

    return dateutil.parser.parse(pubdate)
bgneal@1177 322
bgneal@1177 323
@receiver(topic_content_update, dispatch_uid='forums.latest_posts')
def on_topic_update(sender, **kwargs):
    """
    Signal handler invoked whenever a topic is created or updated.

    All real work is deferred to a Celery task so that the
    request/response cycle stays fast.

    """
    # Pick the task matching the kind of change, then hand off the topic id.
    if kwargs['created']:
        task = forums.tasks.new_topic_task
    else:
        task = forums.tasks.updated_topic_task
    task.delay(sender.id)
bgneal@522 338
bgneal@522 339
def process_new_topic(topic_id):
    """
    Perform new-topic processing. Currently this only maintains the
    cached public topic count statistic.

    """
    try:
        topic = Topic.objects.select_related().get(pk=topic_id)
    except Topic.DoesNotExist:
        logger.warning("process_new_topic: topic %d does not exist", topic_id)
        return

    # Topics in non-public forums don't affect the public statistic
    public_forums = Forum.objects.public_forum_ids()
    if topic.forum.id not in public_forums:
        return

    # Bump the cached topic count; if the key was missing (incr == 1),
    # Redis probably got trashed, so recompute the correct value.
    conn = get_redis_connection()
    if conn.incr(TOPIC_COUNT_KEY) == 1:
        count = Topic.objects.filter(forum__in=public_forums).count()
        conn.set(TOPIC_COUNT_KEY, count)
bgneal@522 367
bgneal@522 368
def process_updated_topic(topic_id):
    """
    Perform updated-topic processing. Only the cached title is
    refreshed, and only if it actually changed.

    """
    conn = get_redis_connection()
    key = UPDATED_TOPIC_KEY % topic_id

    topic_json = conn.get(key)
    if topic_json is None:
        return          # topic is not in the updated-topics cache

    try:
        topic = Topic.objects.get(pk=topic_id)
    except Topic.DoesNotExist:
        logger.warning("topic %d does not exist", topic_id)
        return

    topic_dict = json.loads(topic_json)
    if topic_dict['title'] != topic.name:
        topic_dict['title'] = topic.name
        conn.set(key, json.dumps(topic_dict))
bgneal@595 390
bgneal@595 391
def get_stats():
    """
    Return the topic and post count statistics as a (topic_count,
    post_count) tuple. A statistic that is unavailable is None.

    """
    try:
        conn = get_redis_connection()
        counts = conn.mget(TOPIC_COUNT_KEY, POST_COUNT_KEY)
    except redis.RedisError as e:
        logger.error(e)
        return (None, None)

    # Missing/empty values become None; everything else is an int
    return tuple(int(c) if c else None for c in counts)
bgneal@522 410
bgneal@522 411
def get_latest_topic_ids(num):
    """
    Return a list of topic ids for the latest topics that have posts,
    sorted from newest to oldest.

    """
    try:
        conn = get_redis_connection()
        raw_ids = conn.zrevrange(UPDATED_TOPICS_SET_KEY, 0, num - 1)
    except redis.RedisError as e:
        logger.error(e)
        return []

    return [int(raw) for raw in raw_ids]
bgneal@522 426
bgneal@522 427
def get_latest_topics(num):
    """
    Return a list of dictionaries describing the latest topics with
    updated posts, sorted from newest to oldest.

    """
    try:
        conn = get_redis_connection()
        topic_ids = conn.zrevrange(UPDATED_TOPICS_SET_KEY, 0, num - 1)

        topic_keys = [UPDATED_TOPIC_KEY % n for n in topic_ids]
        json_list = conn.mget(topic_keys) if topic_keys else []

    except redis.RedisError as e:
        logger.error(e)
        return []

    topics = []
    for raw in json_list:
        item = json.loads(raw)
        # 'date' was stored as a unix timestamp; convert it back
        item['date'] = datetime.datetime.fromtimestamp(item['date'])
        topics.append(item)

    return topics
bgneal@522 452
bgneal@522 453
def notify_topic_delete(topic):
    """
    Clean up Redis state when a topic is deleted: remove it from the
    updated-topics sorted set and delete its serialized data.

    Note we don't do anything like this for posts. Since they just
    populate RSS feeds we'll let them 404. The updated topic list is
    seen in a prominent template tag however, so it is a bit more
    important to get that cleaned up.

    """
    try:
        conn = get_redis_connection()
        pipeline = conn.pipeline()
        pipeline.zrem(UPDATED_TOPICS_SET_KEY, topic.id)
        pipeline.delete(UPDATED_TOPIC_KEY % topic.id)
        pipeline.execute()
    except redis.RedisError as e:
        logger.error(e)
bgneal@522 473
bgneal@522 474
def _serialize_post(post):
    """Serialize a post to JSON and return it.

    The post body is re-rendered from Markdown with absolute URLs so
    smiley images work in RSS readers; any attachment embeds are folded
    into the content via the post_rss.html template.

    """
    # Use absolute URLs for smileys for RSS. This means we have to reconvert the
    # post Markdown to HTML.
    content = site_markup(post.body, relative_urls=False)

    # get any attachments for the post
    attachments = Attachment.objects.filter(post=post).select_related(
        'embed').order_by('order')
    embeds = [item.embed for item in attachments]
    if embeds:   # idiomatic truthiness test instead of len()
        content = render_to_string('forums/post_rss.html', {
            'content': content,
            'embeds': embeds,
        })

    # serialize post attributes
    post_content = {
        'id': post.id,
        'title': post.topic.name,
        'content': content,
        'author': post.user.username,
        'pubdate': post.creation_date.isoformat(),
        'forum_name': post.topic.forum.name,
        'url': post.get_absolute_url()
    }

    return json.dumps(post_content)
bgneal@595 505
bgneal@595 506
bgneal@522 507 # Down here to avoid a circular import
bgneal@522 508 import forums.tasks