"""
ssl_images is a custom manage.py command to convert forum post and comment
images to https. It does this by rewriting the markup:

- Images with src = http://surfguitar101.com/something are rewritten to be
  /something.
- Non SG101 images that use http: are downloaded, resized, and uploaded to
  an S3 bucket. The src attribute is replaced with the new S3 URL.
"""
import base64
import datetime
import json
import logging
from optparse import make_option
import os
import re
import signal
import socket
import urllib
import urlparse
import uuid

from django.core.management.base import NoArgsCommand, CommandError
from django.conf import settings
from lxml import etree
import markdown.inlinepatterns
from PIL import Image

from comments.models import Comment
from forums.models import Post
from core.s3 import S3Bucket


LOGFILE = os.path.join(settings.PROJECT_PATH, 'logs', 'ssl_images.log')
logger = logging.getLogger(__name__)

IMAGE_LINK_RE = re.compile(markdown.inlinepatterns.IMAGE_LINK_RE,
                           re.DOTALL | re.UNICODE)
IMAGE_REF_RE = re.compile(markdown.inlinepatterns.IMAGE_REFERENCE_RE,
                          re.DOTALL | re.UNICODE)

SG101_HOSTS = set(['www.surfguitar101.com', 'surfguitar101.com'])
WHITELIST_HOSTS = set(settings.USER_IMAGES_SOURCES)
MODEL_CHOICES = ['comments', 'posts']

PHOTO_MAX_SIZE = (660, 720)
PHOTO_BASE_URL = 'https://s3.amazonaws.com/'
PHOTO_BUCKET_NAME = 'sg101.forum.photos'

CACHE_FILENAME = 'ssl_images_cache.json'

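# Module-level state shared with the helper functions below. opener and bucket
# are created in Command.handle_noargs(); url_cache maps an original image URL
# to its https replacement (or None if conversion failed), and bad_hosts holds
# hostnames that failed DNS lookup. url_cache and bad_hosts are persisted
# between runs by load_cache()/save_cache().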
quit_flag = False
opener = None
bucket = None
url_cache = {}
bad_hosts = set()


def signal_handler(signum, frame):
    """SIGINT signal handler"""
    global quit_flag
    quit_flag = True


def _setup_logging():
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    handler = logging.FileHandler(filename=LOGFILE, encoding='utf-8')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)


class ImageURLopener(urllib.FancyURLopener):
    """Our URL opener. Handles redirects as per FancyURLopener. But all other
    errors and authentication requests will raise an IOError.
    """
    HANDLED_ERRORS = set([302, 301, 303, 307])

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        return urllib.URLopener.http_error_default(self, url, fp, errcode,
                                                    errmsg, headers)

    def http_error(self, url, fp, errcode, errmsg, headers, data=None):
        """Handle http errors.
        We let FancyURLopener handle the redirects, but any other error we want
        to let fail.
        """
        if errcode in self.HANDLED_ERRORS:
            name = 'http_error_%d' % errcode
            method = getattr(self, name)
            if data is None:
                result = method(url, fp, errcode, errmsg, headers)
            else:
                result = method(url, fp, errcode, errmsg, headers, data)
            if result:
                return result
        return self.http_error_default(url, fp, errcode, errmsg, headers)


def download_image(parsed_url):
    """Downloads the image file from the given source URL.

    If successful returns the path to the downloaded file. Otherwise None is
    returned.
    """
    src = parsed_url.geturl()
    logger.info("Retrieving %s", src)
    try:
        fn, hdrs = opener.retrieve(src)
    except IOError as ex:
        args = ex.args if ex.args else []
        if len(args) == 4 and args[0] == 'http error':
            logger.error("http error: %d - %s", args[1], args[2])
        elif len(args) == 2 and isinstance(args[1], socket.gaierror):
            logger.error("gaierror, ignoring host %s", parsed_url.hostname)
            bad_hosts.add(parsed_url.hostname)
        else:
            logger.error("%s", ex)
        return None

    # Does it look like an image?
    content_type = hdrs.get('content-type')
    if not content_type:
        logger.error("No content-type header found")
        return None

    file_size = os.stat(fn).st_size
    logger.info("Retrieved: %s bytes; content-type: %s", file_size, content_type)

    parts = content_type.split('/')
    if len(parts) < 2 or parts[0] != 'image':
        logger.error("Unknown content-type: %s", content_type)
        return None

    return fn


def resize_image(img_path):
    """Resizes the image found at img_path if necessary."""
    image = Image.open(img_path)
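    # Note: image.size and PHOTO_MAX_SIZE are (width, height) tuples and ">"
    # compares them lexicographically, so effectively only over-wide images
    # trigger a resize. thumbnail() preserves aspect ratio and never enlarges.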
    if image.size > PHOTO_MAX_SIZE:
        logger.info('Resizing from %s to %s', image.size, PHOTO_MAX_SIZE)
        image.thumbnail(PHOTO_MAX_SIZE, Image.ANTIALIAS)
        image.save(img_path)


def gen_key():
    """Return a random key."""
    return base64.b64encode(uuid.uuid4().bytes, '-_').rstrip('=')


def upload_image(img_path):
    """Upload image file located at img_path to our S3 bucket.

    Returns the URL of the image in the bucket or None if an error occurs.
    """
    logger.info("upload_image starting")
    # Make a unique name for the image in the bucket
    ext = os.path.splitext(img_path)[1]
    file_key = gen_key() + ext
    try:
        return bucket.upload_from_filename(file_key, img_path, public=True)
    except IOError as ex:
        logger.error("Error uploading file: %s", ex)
        return None


def convert_to_ssl(parsed_url):
    """Top-level function for moving an image to SSL."""

    src = parsed_url.geturl()

    if parsed_url.hostname in bad_hosts:
        logger.info("Host known to be bad, skipping: %s", src)
        return None

    # Check the cache
    try:
        new_url = url_cache[src]
    except KeyError:
        # cache miss, try to get the file
        new_url = save_image_to_cloud(parsed_url)
        url_cache[src] = new_url
    else:
        if new_url:
            logger.info("Found URL in cache: %s => %s", src, new_url)
        else:
            logger.info("URL known to be bad, skipping: %s", src)

    return new_url


def save_image_to_cloud(parsed_url):
    """Downloads an image at a given source URL. Uploads it to cloud storage.

    Returns the new URL or None if unsuccessful.
    """
    fn = download_image(parsed_url)
    if fn:
        resize_image(fn)
        return upload_image(fn)
    return None


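# Callback for IMAGE_LINK_RE.sub() in process_post(). For each Markdown image,
# group(1) is the alt text and group(8) is the parenthesized src/title part
# (per the inline pattern this module was written against). SG101-hosted
# sources become relative paths, plain http sources go through
# convert_to_ssl(), and anything that cannot be converted is downgraded to a
# regular Markdown link on the original src.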
def replace_image_markup(match):
    src_parts = match.group(8).split()
    if src_parts:
        src = src_parts[0]
        if src[0] == "<" and src[-1] == ">":
            src = src[1:-1]
    else:
        src = ''

    title = ''
    if len(src_parts) > 1:
        title = " ".join(src_parts[1:])
    alt = match.group(1)

    new_src = None
    if src:
        r = urlparse.urlparse(src)
        if r.hostname in SG101_HOSTS:
            new_src = r.path  # convert to relative path
        elif r.scheme == 'http':
            # Try a few things to get this on ssl:
            new_src = convert_to_ssl(r)
        elif r.scheme == 'https':
            if r.hostname in WHITELIST_HOSTS:
                new_src = src  # already in whitelist
            else:
                new_src = convert_to_ssl(r)

    if new_src:
        if title:
            s = u'![{alt}]({src} {title})'.format(alt=alt, src=new_src, title=title)
        else:
            s = u'![{alt}]({src})'.format(alt=alt, src=new_src)
    else:
        # something's messed up, convert to a link using original src
        s = u'[{alt}]({src})'.format(alt=alt, src=src)

    return s


def warn_if_image_refs(text, model_name, pk):
    """Search text for Markdown image reference markup.

    We aren't expecting these, but we will log something if we see any.
    """
    if IMAGE_REF_RE.search(text):
        logger.warning("Image reference found in %s pk = #%d", model_name, pk)


def process_post(text):
    """Process the post object:

    A regex substitution is run on the post's text field. This fixes up image
    links, getting rid of plain old http sources; either converting to https
    or relative style links (if the link is to SG101).

    """
    return IMAGE_LINK_RE.sub(replace_image_markup, text)


def html_check(html):
    """Return True if the given HTML fragment has <img> tags with src attributes
    that use http, and False otherwise.
    """
    if not html:
        return False

    root = etree.HTML(html)
    for img in root.iter('img'):
        src = img.get('src')
        if src and src.lower().startswith('http:'):
            return True
    return False


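# Management command entry point: handle_noargs() picks the queryset based on
# --model, applies the optional [i:j] slice, sets the socket timeout, installs
# the SIGINT handler, builds the URL opener and S3 bucket, then rewrites each
# object's markup and saves it when the text (or stale smiley HTML) changed.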
class Command(NoArgsCommand):
    help = "Rewrite forum posts and comments to not use http for images"
    option_list = NoArgsCommand.option_list + (
        make_option('-m', '--model',
            choices=MODEL_CHOICES,
            help="which model to update; must be one of {{{}}}".format(
                ', '.join(MODEL_CHOICES))),
        make_option('-i', '--i',
            type='int',
            help="optional first slice index; the i in [i:j]"),
        make_option('-j', '--j',
            type='int',
            help="optional second slice index; the j in [i:j]"),
        make_option('-t', '--timeout',
            type='int',
            help="optional socket timeout (secs)"),
    )

    def handle_noargs(self, **options):
        time_started = datetime.datetime.now()
        _setup_logging()
        logger.info("Starting; arguments received: %s", options)

        if options['model'] not in MODEL_CHOICES:
            raise CommandError('Please choose a --model option')

        if options['model'] == 'comments':
            qs = Comment.objects.all()
            text_attr = 'comment'
            model_name = 'Comment'
        else:
            qs = Post.objects.all()
            text_attr = 'body'
            model_name = 'Post'

        i, j = options['i'], options['j']

        if i is not None and i < 0:
            raise CommandError("-i must be >= 0")
        if j is not None and j < 0:
            raise CommandError("-j must be >= 0")
        if j is not None and i is not None and j <= i:
            raise CommandError("-j must be > -i")

        if i is not None and j is not None:
            qs = qs[i:j]
        elif i is not None and j is None:
            qs = qs[i:]
        elif i is None and j is not None:
            qs = qs[:j]

        # Set global socket timeout
        # optparse stores None for -t/--timeout when it is not supplied, so
        # apply the 30 second default explicitly.
        timeout = options.get('timeout') or 30
        logger.info("Setting socket timeout to %d", timeout)
        socket.setdefaulttimeout(timeout)

        # Install signal handler for ctrl-c
        signal.signal(signal.SIGINT, signal_handler)

        # Create URL opener to download photos
        global opener
        opener = ImageURLopener()

        # Create bucket to upload photos
        global bucket
        bucket = S3Bucket(access_key=settings.USER_PHOTOS_ACCESS_KEY,
                          secret_key=settings.USER_PHOTOS_SECRET_KEY,
                          base_url=PHOTO_BASE_URL,
                          bucket_name=PHOTO_BUCKET_NAME)

        # Load cached info from previous runs
        load_cache()

        if i is None:
            i = 0

        count = 0
        for n, model in enumerate(qs.iterator()):
            if quit_flag:
                logger.warning("SIGINT received, exiting")
                break
            logger.info("Processing %s #%d (pk = %d)", model_name, n + i, model.pk)
            txt = getattr(model, text_attr)
            warn_if_image_refs(txt, model_name, model.pk)
            new_txt = process_post(txt)
            if txt != new_txt:
                logger.info("Content changed on %s #%d (pk = %d)",
                            model_name, n + i, model.pk)
                logger.debug("original: %s", txt)
                logger.debug("changed: %s", new_txt)
                setattr(model, text_attr, new_txt)
                model.save()
            elif html_check(model.html):
                # Check for content generated with older smiley code that used
                # absolute URLs for the smiley images. If True, then just save
                # the model again to force updated HTML to be created.
                logger.info("Older Smiley HTML detected, forcing a save")
                model.save()
            count += 1

        time_finished = datetime.datetime.now()
        elapsed = time_finished - time_started
        logger.info("ssl_images exiting; number of objects: %d; elapsed: %s",
                    count, elapsed)

        http_images = len(url_cache)
        https_images = sum(1 for v in url_cache.itervalues() if v)
        bad_images = http_images - https_images
        if http_images > 0:
            pct_saved = float(https_images) / http_images * 100.0
        else:
            pct_saved = 0.0

        logger.info("Summary: http: %d; https: %d; lost: %d; saved: %3.1f %%",
                    http_images, https_images, bad_images, pct_saved)

        save_cache()
        logger.info("ssl_images done")


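# The cache file is a plain JSON object; illustrative (made-up) example:
#
#   {
#       "bad_hosts": ["img.example.com"],
#       "url_cache": {
#           "http://img.example.com/a.jpg": null,
#           "http://pics.example.org/b.png": "https://s3.amazonaws.com/sg101.forum.photos/xyz.png"
#       }
#   }
#
# A null entry marks a URL that could not be converted, so it is skipped on
# later runs instead of being fetched again.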
def load_cache():
    """Load cache from previous runs."""
    logger.info("Loading cached information")
    try:
        with open(CACHE_FILENAME, 'r') as fp:
            d = json.load(fp)
    except IOError as ex:
        logger.error("Cache file (%s) IOError: %s", CACHE_FILENAME, ex)
        return
    except ValueError:
        logger.error("Mangled cache file: %s", CACHE_FILENAME)
        return

    global bad_hosts, url_cache
    try:
        bad_hosts = set(d['bad_hosts'])
        url_cache = d['url_cache']
    except KeyError:
        logger.error("Malformed cache file: %s", CACHE_FILENAME)


def save_cache():
    """Save our cache to a file for future runs."""
    logger.info("Saving cached information")
    d = {'bad_hosts': list(bad_hosts), 'url_cache': url_cache}
    with open(CACHE_FILENAME, 'w') as fp:
        json.dump(d, fp, indent=4)