comparison gpp/forums/latest.py @ 523:e9c446a64423
Was catching the redis.RedisError exception, but already had a local variable called redis. This seemed to work in development, but failed on Python 2.5.
author | Brian Neal <bgneal@gmail.com> |
---|---|
date | Sun, 18 Dec 2011 23:55:53 +0000 |
parents | 82b97697312e |
children | d9cf6f60b5a5 |
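For context on the description above: in Python, assigning to a name anywhere in a function makes that name local to the entire function, so a local variable called `redis` shadows the `redis` module even inside the `except` clause of the same function. A minimal sketch of the pattern and of the fix this changeset applies (the connection helper and key names are illustrative stand-ins, not taken from this file):

```python
import redis  # redis-py client library


def get_redis_connection():
    # Stand-in for the project's connection helper; illustrative only.
    return redis.Redis()


def get_stats_broken():
    try:
        # Assigning to the name "redis" makes it local to this function,
        # shadowing the redis module for the whole function body.
        redis = get_redis_connection()
        return redis.mget('topic-count-key', 'post-count-key')
    except redis.RedisError:
        # Evaluated only when an exception is raised; by then "redis" is the
        # local connection object (or unbound if the assignment never ran),
        # so this lookup typically fails instead of catching the error.
        return (None, None)


def get_stats_fixed():
    try:
        conn = get_redis_connection()  # renamed local; "redis" stays the module
        return conn.mget('topic-count-key', 'post-count-key')
    except redis.RedisError:           # resolves to the module again
        return (None, None)
```

Renaming the local variable to `conn`, as the comparison below shows, lets `redis.RedisError` resolve to the module again throughout the file.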
comparison
522:82b97697312e | 523:e9c446a64423 |
---|---|
14 import logging | 14 import logging |
15 import time | 15 import time |
16 | 16 |
17 from django.dispatch import receiver | 17 from django.dispatch import receiver |
18 from django.utils import simplejson | 18 from django.utils import simplejson |
| 19 import redis |
19 | 20 |
20 from forums.signals import post_content_update, topic_content_update | 21 from forums.signals import post_content_update, topic_content_update |
21 from forums.models import Forum, Topic, Post | 22 from forums.models import Forum, Topic, Post |
22 from forums.views.subscriptions import notify_topic_subscribers | 23 from forums.views.subscriptions import notify_topic_subscribers |
23 from forums.tools import auto_favorite, auto_subscribe | 24 from forums.tools import auto_favorite, auto_subscribe |
69 | 70 |
70 # selectively process posts from non-public forums | 71 # selectively process posts from non-public forums |
71 public_forums = Forum.objects.public_forum_ids() | 72 public_forums = Forum.objects.public_forum_ids() |
72 | 73 |
73 if post.topic.forum.id in public_forums: | 74 if post.topic.forum.id in public_forums: |
74 redis = get_redis_connection() | 75 conn = get_redis_connection() |
75 _update_post_feeds(redis, post) | 76 _update_post_feeds(conn, post) |
76 _update_post_count(redis, public_forums) | 77 _update_post_count(conn, public_forums) |
77 _update_latest_topics(redis, post) | 78 _update_latest_topics(conn, post) |
78 | 79 |
79 # send out any email notifications | 80 # send out any email notifications |
80 notify_topic_subscribers(post, defer=False) | 81 notify_topic_subscribers(post, defer=False) |
81 | 82 |
82 # perform any auto-favorite and auto-subscribe actions for the new post | 83 # perform any auto-favorite and auto-subscribe actions for the new post |
83 auto_favorite(post) | 84 auto_favorite(post) |
84 auto_subscribe(post) | 85 auto_subscribe(post) |
85 | 86 |
86 | 87 |
87 def _update_post_feeds(redis, post): | 88 def _update_post_feeds(conn, post): |
88 """ | 89 """ |
89 Updates the forum feeds we keep in Redis so that our RSS feeds are quick. | 90 Updates the forum feeds we keep in Redis so that our RSS feeds are quick. |
90 | 91 |
91 """ | 92 """ |
92 # serialize post attributes | 93 # serialize post attributes |
102 | 103 |
103 s = simplejson.dumps(post_content) | 104 s = simplejson.dumps(post_content) |
104 | 105 |
105 # store in Redis | 106 # store in Redis |
106 | 107 |
107 pipeline = redis.pipeline() | 108 pipeline = conn.pipeline() |
108 | 109 |
109 key = 'forums:latest:%d' % post.topic.forum.id | 110 key = 'forums:latest:%d' % post.topic.forum.id |
110 | 111 |
111 pipeline.lpush(key, s) | 112 pipeline.lpush(key, s) |
112 pipeline.ltrim(key, 0, MAX_POSTS - 1) | 113 pipeline.ltrim(key, 0, MAX_POSTS - 1) |
120 pipeline.ltrim(key, 0, MAX_POSTS - 1) | 121 pipeline.ltrim(key, 0, MAX_POSTS - 1) |
121 | 122 |
122 pipeline.execute() | 123 pipeline.execute() |
123 | 124 |
124 | 125 |
125 def _update_post_count(redis, public_forums): | 126 def _update_post_count(conn, public_forums): |
126 """ | 127 """ |
127 Updates the post count we cache in Redis. Doing a COUNT(*) on the post table | 128 Updates the post count we cache in Redis. Doing a COUNT(*) on the post table |
128 can be expensive in MySQL InnoDB. | 129 can be expensive in MySQL InnoDB. |
129 | 130 |
130 """ | 131 """ |
131 result = redis.incr(POST_COUNT_KEY) | 132 result = conn.incr(POST_COUNT_KEY) |
132 if result == 1: | 133 if result == 1: |
133 # it is likely redis got trashed, so re-compute the correct value | 134 # it is likely redis got trashed, so re-compute the correct value |
134 | 135 |
135 count = Post.objects.filter(topic__forum__in=public_forums).count() | 136 count = Post.objects.filter(topic__forum__in=public_forums).count() |
136 redis.set(POST_COUNT_KEY, count) | 137 conn.set(POST_COUNT_KEY, count) |
137 | 138 |
138 | 139 |
139 def _update_latest_topics(redis, post): | 140 def _update_latest_topics(conn, post): |
140 """ | 141 """ |
141 Updates the "latest topics with new posts" list we cache in Redis for speed. | 142 Updates the "latest topics with new posts" list we cache in Redis for speed. |
142 There is a template tag and forum view that uses this information. | 143 There is a template tag and forum view that uses this information. |
143 | 144 |
144 """ | 145 """ |
153 'url': post.get_absolute_url() | 154 'url': post.get_absolute_url() |
154 } | 155 } |
155 json = simplejson.dumps(topic_content) | 156 json = simplejson.dumps(topic_content) |
156 key = UPDATED_TOPIC_KEY % topic_id | 157 key = UPDATED_TOPIC_KEY % topic_id |
157 | 158 |
158 pipeline = redis.pipeline() | 159 pipeline = conn.pipeline() |
159 pipeline.set(key, json) | 160 pipeline.set(key, json) |
160 pipeline.zadd(UPDATED_TOPICS_SET_KEY, topic_score, topic_id) | 161 pipeline.zadd(UPDATED_TOPICS_SET_KEY, topic_score, topic_id) |
161 pipeline.zcard(UPDATED_TOPICS_SET_KEY) | 162 pipeline.zcard(UPDATED_TOPICS_SET_KEY) |
162 results = pipeline.execute() | 163 results = pipeline.execute() |
163 | 164 |
166 num_to_del = num_topics - MAX_UPDATED_TOPICS | 167 num_to_del = num_topics - MAX_UPDATED_TOPICS |
167 if num_to_del > 0: | 168 if num_to_del > 0: |
168 # get the IDs of the topics we need to delete first | 169 # get the IDs of the topics we need to delete first |
169 start = 0 | 170 start = 0 |
170 stop = num_to_del - 1 # Redis indices are inclusive | 171 stop = num_to_del - 1 # Redis indices are inclusive |
171 old_ids = redis.zrange(UPDATED_TOPICS_SET_KEY, start, stop) | 172 old_ids = conn.zrange(UPDATED_TOPICS_SET_KEY, start, stop) |
172 | 173 |
173 keys = [UPDATED_TOPIC_KEY % n for n in old_ids] | 174 keys = [UPDATED_TOPIC_KEY % n for n in old_ids] |
174 redis.delete(*keys) | 175 conn.delete(*keys) |
175 | 176 |
176 # now delete the oldest num_to_del topics | 177 # now delete the oldest num_to_del topics |
177 redis.zremrangebyrank(UPDATED_TOPICS_SET_KEY, start, stop) | 178 conn.zremrangebyrank(UPDATED_TOPICS_SET_KEY, start, stop) |
178 | 179 |
179 | 180 |
180 def get_latest_posts(num_posts=MAX_POSTS, forum_id=None): | 181 def get_latest_posts(num_posts=MAX_POSTS, forum_id=None): |
181 """ | 182 """ |
182 This function retrieves num_posts latest posts for the forum with the given | 183 This function retrieves num_posts latest posts for the forum with the given |
190 num_posts = max(0, min(MAX_POSTS, num_posts)) | 191 num_posts = max(0, min(MAX_POSTS, num_posts)) |
191 | 192 |
192 if num_posts == 0: | 193 if num_posts == 0: |
193 return [] | 194 return [] |
194 | 195 |
195 redis = get_redis_connection() | 196 conn = get_redis_connection() |
196 raw_posts = redis.lrange(key, 0, num_posts - 1) | 197 raw_posts = conn.lrange(key, 0, num_posts - 1) |
197 | 198 |
198 posts = [] | 199 posts = [] |
199 for raw_post in raw_posts: | 200 for raw_post in raw_posts: |
200 post = simplejson.loads(raw_post) | 201 post = simplejson.loads(raw_post) |
201 | 202 |
242 | 243 |
243 if topic.forum.id not in public_forums: | 244 if topic.forum.id not in public_forums: |
244 return | 245 return |
245 | 246 |
246 # update the topic count statistic | 247 # update the topic count statistic |
247 redis = get_redis_connection() | 248 conn = get_redis_connection() |
248 | 249 |
249 result = redis.incr(TOPIC_COUNT_KEY) | 250 result = conn.incr(TOPIC_COUNT_KEY) |
250 if result == 1: | 251 if result == 1: |
251 # it is likely redis got trashed, so re-compute the correct value | 252 # it is likely redis got trashed, so re-compute the correct value |
252 | 253 |
253 count = Topic.objects.filter(forum__in=public_forums).count() | 254 count = Topic.objects.filter(forum__in=public_forums).count() |
254 redis.set(TOPIC_COUNT_KEY, count) | 255 conn.set(TOPIC_COUNT_KEY, count) |
255 | 256 |
256 | 257 |
257 def get_stats(): | 258 def get_stats(): |
258 """ | 259 """ |
259 This function returns the topic and post count statistics as a tuple, in | 260 This function returns the topic and post count statistics as a tuple, in |
260 that order. If a statistic is not available, its position in the tuple will | 261 that order. If a statistic is not available, its position in the tuple will |
261 be None. | 262 be None. |
262 | 263 |
263 """ | 264 """ |
264 try: | 265 try: |
265 redis = get_redis_connection() | 266 conn = get_redis_connection() |
266 result = redis.mget(TOPIC_COUNT_KEY, POST_COUNT_KEY) | 267 result = conn.mget(TOPIC_COUNT_KEY, POST_COUNT_KEY) |
267 except redis.RedisError, e: | 268 except redis.RedisError, e: |
268 logger.error(e) | 269 logger.error(e) |
269 return (None, None) | 270 return (None, None) |
270 | 271 |
271 topic_count = int(result[0]) if result[0] else None | 272 topic_count = int(result[0]) if result[0] else None |
279 Return a list of topic ids from the latest topics that have posts. The ids | 280 Return a list of topic ids from the latest topics that have posts. The ids |
280 will be sorted from newest to oldest. | 281 will be sorted from newest to oldest. |
281 | 282 |
282 """ | 283 """ |
283 try: | 284 try: |
284 redis = get_redis_connection() | 285 conn = get_redis_connection() |
285 result = redis.zrevrange(UPDATED_TOPICS_SET_KEY, 0, num - 1) | 286 result = conn.zrevrange(UPDATED_TOPICS_SET_KEY, 0, num - 1) |
286 except redis.RedisError, e: | 287 except redis.RedisError, e: |
287 logger.error(e) | 288 logger.error(e) |
288 return [] | 289 return [] |
289 | 290 |
290 return [int(n) for n in result] | 291 return [int(n) for n in result] |
295 Return a list of dictionaries with information about the latest topics that | 296 Return a list of dictionaries with information about the latest topics that |
296 have updated posts. The topics are sorted from newest to oldest. | 297 have updated posts. The topics are sorted from newest to oldest. |
297 | 298 |
298 """ | 299 """ |
299 try: | 300 try: |
300 redis = get_redis_connection() | 301 conn = get_redis_connection() |
301 result = redis.zrevrange(UPDATED_TOPICS_SET_KEY, 0, num - 1) | 302 result = conn.zrevrange(UPDATED_TOPICS_SET_KEY, 0, num - 1) |
302 | 303 |
303 topic_keys = [UPDATED_TOPIC_KEY % n for n in result] | 304 topic_keys = [UPDATED_TOPIC_KEY % n for n in result] |
304 json_list = redis.mget(topic_keys) | 305 json_list = conn.mget(topic_keys) |
305 | 306 |
306 except redis.RedisError, e: | 307 except redis.RedisError, e: |
307 logger.error(e) | 308 logger.error(e) |
308 return [] | 309 return [] |
309 | 310 |
326 feeds we'll let them 404. The updated topic list is seen in a prominent | 327 feeds we'll let them 404. The updated topic list is seen in a prominent |
327 template tag however, so it is a bit more important to get that cleaned up. | 328 template tag however, so it is a bit more important to get that cleaned up. |
328 | 329 |
329 """ | 330 """ |
330 try: | 331 try: |
331 redis = get_redis_connection() | 332 conn = get_redis_connection() |
332 pipeline = redis.pipeline() | 333 pipeline = conn.pipeline() |
333 pipeline.zrem(UPDATED_TOPICS_SET_KEY, topic.id) | 334 pipeline.zrem(UPDATED_TOPICS_SET_KEY, topic.id) |
334 pipeline.delete(UPDATED_TOPIC_KEY % topic.id) | 335 pipeline.delete(UPDATED_TOPIC_KEY % topic.id) |
335 pipeline.execute() | 336 pipeline.execute() |
336 except redis.RedisError, e: | 337 except redis.RedisError, e: |
337 logger.error(e) | 338 logger.error(e) |
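A side note on the counter updates shown above: INCR on a key that does not exist returns 1, and the code treats that as a signal that the cached counter was lost (for example, Redis was flushed) and must be re-seeded from an authoritative COUNT query. A standalone sketch of that pattern, assuming a plain redis-py client and a hypothetical compute_count callable standing in for the database query:

```python
import redis


def bump_cached_count(conn, key, compute_count):
    """Increment a counter cached in Redis, re-seeding it from the source of
    truth when the key appears to have been lost (INCR of a missing key
    returns 1)."""
    result = conn.incr(key)
    if result == 1:
        # The key most likely did not exist, so fall back to an authoritative
        # count (which already includes the item that triggered this call).
        conn.set(key, compute_count())


# Usage sketch:
#   conn = redis.Redis()
#   bump_cached_count(conn, 'forums:post-count',
#                     lambda: Post.objects.filter(topic__forum__in=ids).count())
```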