"""
ssl_images is a custom manage.py command to convert forum post and comment
images to https. It does this by rewriting the markup:
- Images with src = http://surfguitar101.com/something are rewritten to be
  /something.
- Non SG101 images that use http: are downloaded, resized, and uploaded to
  an S3 bucket. The src attribute is replaced with the new S3 URL.
"""
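# Example invocation (the argument values are illustrative only; see the
# option definitions on the Command class below):
#   ./manage.py ssl_images --model=posts -i 0 -j 500 --timeout=10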
import base64
import datetime
import json
import logging
from optparse import make_option
import os
import re
import signal
import urlparse
import uuid

from django.core.management.base import NoArgsCommand, CommandError
from django.conf import settings
from lxml import etree
import lxml.html
import markdown.inlinepatterns
from PIL import Image
import requests

from bio.models import UserProfile
from comments.models import Comment
from forums.models import Post
from core.download import download_file
from core.functions import remove_file
from core.s3 import S3Bucket
from news.models import Story


LOGFILE = os.path.join(settings.PROJECT_PATH, 'logs', 'ssl_images.log')
logger = logging.getLogger(__name__)

IMAGE_LINK_RE = re.compile(markdown.inlinepatterns.IMAGE_LINK_RE,
                           re.DOTALL | re.UNICODE)
IMAGE_REF_RE = re.compile(markdown.inlinepatterns.IMAGE_REFERENCE_RE,
                          re.DOTALL | re.UNICODE)

SG101_HOSTS = set(['www.surfguitar101.com', 'surfguitar101.com'])
WHITELIST_HOSTS = set(settings.USER_IMAGES_SOURCES)
MODEL_CHOICES = ['comments', 'posts', 'news', 'profiles']

PHOTO_MAX_SIZE = (660, 720)
PHOTO_BASE_URL = settings.HOT_LINK_PHOTOS_BASE_URL
PHOTO_BUCKET_NAME = settings.HOT_LINK_PHOTOS_BUCKET

CACHE_FILENAME = 'ssl_images_cache.json'

quit_flag = False
bucket = None
url_cache = {}
bad_hosts = set()
request_timeout = None


def signal_handler(signum, frame):
    """SIGINT signal handler"""
    global quit_flag
    quit_flag = True


def _setup_logging():
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    handler = logging.FileHandler(filename=LOGFILE, encoding='utf-8')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(logging.INFO)
    requests_log.propagate = True
    requests_log.addHandler(handler)

    dl_log = logging.getLogger("core.download")
    dl_log.setLevel(logging.INFO)
    dl_log.propagate = True
    dl_log.addHandler(handler)


def resize_image(img_path):
    """Resizes the image found at img_path if necessary.

    Returns True if the image was resized or resizing wasn't necessary.
    Returns False if the image could not be read or processed.
    """
    try:
        image = Image.open(img_path)
    except IOError as ex:
        logger.error("Error opening %s: %s", img_path, ex)
        return False

    if image.size[0] > PHOTO_MAX_SIZE[0] or image.size[1] > PHOTO_MAX_SIZE[1]:
        logger.info('Resizing from %s to %s', image.size, PHOTO_MAX_SIZE)
        try:
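            # thumbnail() shrinks the image in place, preserving the aspect
            # ratio; it never enlarges an image that is already small enough.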
            image.thumbnail(PHOTO_MAX_SIZE, Image.ANTIALIAS)
            image.save(img_path)
        except IOError as ex:
            logger.error("Error resizing image from %s: %s", img_path, ex)
            return False

    return True


def gen_key():
    """Return a random key."""
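    # uuid4() supplies 16 random bytes; URL-safe base64 (with '-' and '_' as
    # the extra characters) encodes them to 22 characters once the trailing
    # '==' padding is stripped.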
    return base64.b64encode(uuid.uuid4().bytes, '-_').rstrip('=')


def upload_image(img_path):
    """Upload image file located at img_path to our S3 bucket.

    Returns the URL of the image in the bucket or None if an error occurs.
    """
    logger.info("upload_image starting")
    # Make a unique name for the image in the bucket
    ext = os.path.splitext(img_path)[1]
    file_key = gen_key() + ext
    try:
        return bucket.upload_from_filename(file_key, img_path, public=True)
    except IOError as ex:
        logger.error("Error uploading file: %s", ex)
        return None


def convert_to_ssl(parsed_url):
    """Top-level function for moving an image to SSL."""

    src = parsed_url.geturl()

    if parsed_url.hostname in bad_hosts:
        logger.info("Host known to be bad, skipping: %s", src)
        return None

    # Check the cache
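    # (a cached value of None marks a URL that previously failed to convert,
    # so it is skipped rather than retried)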
    try:
        new_url = url_cache[src]
    except KeyError:
        # cache miss, try to get the file
        new_url = save_image_to_cloud(parsed_url)
        url_cache[src] = new_url
    else:
        if new_url:
            logger.info("Found URL in cache: %s => %s", src, new_url)
        else:
            logger.info("URL known to be bad, skipping: %s", src)

    return new_url


def save_image_to_cloud(parsed_url):
    """Downloads an image at a given source URL. Uploads it to cloud storage.

    Returns the new URL or None if unsuccessful.
    """
    url = parsed_url.geturl()
    fn = None
    try:
        fn = download_file(url, timeout=request_timeout)
    except requests.ConnectionError as ex:
        logger.error("ConnectionError, ignoring host %s", parsed_url.hostname)
        bad_hosts.add(parsed_url.hostname)
    except requests.RequestException as ex:
        logger.error("%s", ex)
    except Exception as ex:
        logger.exception("%s", ex)

    if fn:
        with remove_file(fn):
            if resize_image(fn):
                return upload_image(fn)
    return None


def replace_image_markup(match):
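    # IMAGE_LINK_RE puts the alt text in group(1) and everything between the
    # parentheses (the src, optionally followed by a title) in group(8).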
    src_parts = match.group(8).split()
    if src_parts:
        src = src_parts[0]
        if src[0] == "<" and src[-1] == ">":
            src = src[1:-1]
    else:
        src = ''

    title = ''
    if len(src_parts) > 1:
        title = " ".join(src_parts[1:])
    alt = match.group(1)

    new_src = None
    if src:
        try:
            r = urlparse.urlparse(src)
        except ValueError:
            return u'{bad image}'

        if r.hostname in SG101_HOSTS:
            new_src = r.path  # convert to relative path
        elif r.scheme == 'http':
            # Try a few things to get this on ssl:
            new_src = convert_to_ssl(r)
        elif r.scheme == 'https':
            if r.hostname in WHITELIST_HOSTS:
                new_src = src  # already in whitelist
            else:
                new_src = convert_to_ssl(r)

    if new_src:
        if title:
            s = u'![{alt}]({src} {title})'.format(alt=alt, src=new_src, title=title)
        else:
            s = u'![{alt}]({src})'.format(alt=alt, src=new_src)
    else:
        # something's messed up, convert to a link using original src
        s = u'[{alt}]({src})'.format(alt=alt, src=src)

    return s


def warn_if_image_refs(text, model_name, pk):
    """Search text for Markdown image reference markup.

    We aren't expecting these, but we will log something if we see any.
    """
    if IMAGE_REF_RE.search(text):
        logger.warning("Image reference found in %s pk = #%d", model_name, pk)


def process_post(text):
    """Process the text of a post or comment.

    A regex substitution is run on the text to fix up image links: links to
    SG101 are converted to relative paths, and other plain http sources are
    converted to https where possible.
    """
    return IMAGE_LINK_RE.sub(replace_image_markup, text)


def process_html(html):
    """Process the html fragment, converting to https where needed."""
    s = html.strip()
    if not s:
        return s

    changed = False
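    # create_parent=True wraps the fragment in a <div> so that fragments with
    # multiple top-level elements (or bare text) parse cleanly; the wrapper is
    # stripped back off below before the result is returned.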
    root = lxml.html.fragment_fromstring(s, create_parent=True)
    for img in root.iter('img'):
        src = img.get('src')
        src = src.strip() if src else ''
        if src:
            try:
                r = urlparse.urlparse(src)
            except ValueError:
                logger.warning("Bad url? Should not happen; skipping...")
                continue

            new_src = None
            if r.hostname in SG101_HOSTS:
                new_src = r.path  # convert to relative path
            elif ((r.scheme == 'http') or
                  (r.scheme == 'https' and r.hostname not in WHITELIST_HOSTS)):
                new_src = convert_to_ssl(r)
                if not new_src:
                    # failed to convert to https; convert to a link
                    tail = img.tail
                    img.clear()
                    img.tag = 'a'
                    img.set('href', src)
                    img.text = 'Image'
                    img.tail = tail
                    changed = True

            if new_src:
                img.set('src', new_src)
                changed = True

    if changed:
        result = lxml.html.tostring(root, encoding='utf-8')
        result = result[5:-6]  # strip off parent div we added
        return result.decode('utf-8')
    return html


def html_check(html):
    """Return True if the given HTML fragment has <img> tags with src attributes
    that use http, and False otherwise.
    """
    if not html:
        return False

    root = etree.HTML(html)
    for img in root.iter('img'):
        src = img.get('src')
        if src and src.lower().startswith('http:'):
            return True
    return False


class Command(NoArgsCommand):
    help = "Rewrite posts, comments, news, and profiles to not use http for images"
    option_list = NoArgsCommand.option_list + (
        make_option('-m', '--model',
                    choices=MODEL_CHOICES,
                    help="which model to update; must be one of {{{}}}".format(
                        ', '.join(MODEL_CHOICES))),
        make_option('-i', '--i',
                    type='int',
                    help="optional first slice index; the i in [i:j]"),
        make_option('-j', '--j',
                    type='int',
                    help="optional second slice index; the j in [i:j]"),
        make_option('-t', '--timeout',
                    type='float',
                    help="optional socket timeout (secs)",
                    default=30.0),
    )

    def handle_noargs(self, **options):
        time_started = datetime.datetime.now()
        _setup_logging()
        logger.info("Starting; arguments received: %s", options)

        if options['model'] not in MODEL_CHOICES:
            raise CommandError('Please choose a --model option')

        save_kwargs = {}
        if options['model'] == 'comments':
            qs = Comment.objects.all()
            text_attrs = ['comment']
            model_name = 'Comment'
        elif options['model'] == 'posts':
            qs = Post.objects.all()
            text_attrs = ['body']
            model_name = 'Post'
        elif options['model'] == 'profiles':
            qs = UserProfile.objects.all()
            text_attrs = ['profile_text', 'signature']
            model_name = 'UserProfile'
            save_kwargs = {'content_update': True}
        else:
            qs = Story.objects.all()
            text_attrs = ['short_text', 'long_text']
            model_name = 'Story'

        html_based = options['model'] == 'news'

        i, j = options['i'], options['j']

        if i is not None and i < 0:
            raise CommandError("-i must be >= 0")
        if j is not None and j < 0:
            raise CommandError("-j must be >= 0")
        if j is not None and i is not None and j <= i:
            raise CommandError("-j must be > -i")

        if i is not None and j is not None:
            qs = qs[i:j]
        elif i is not None and j is None:
            qs = qs[i:]
        elif i is None and j is not None:
            qs = qs[:j]

        # Set global socket timeout
        global request_timeout
        request_timeout = options.get('timeout')
        logger.info("Using socket timeout of %4.2f", request_timeout)

        # Install signal handler for ctrl-c
        signal.signal(signal.SIGINT, signal_handler)

        # Create bucket to upload photos
        global bucket
        bucket = S3Bucket(access_key=settings.USER_PHOTOS_ACCESS_KEY,
                          secret_key=settings.USER_PHOTOS_SECRET_KEY,
                          base_url=PHOTO_BASE_URL,
                          bucket_name=PHOTO_BUCKET_NAME)

        # Load cached info from previous runs
        load_cache()

        if i is None:
            i = 0

        count = 0
        for n, model in enumerate(qs.iterator()):
            if quit_flag:
                logger.warning("SIGINT received, exiting")
                break
            logger.info("Processing %s #%d (pk = %d)", model_name, n + i, model.pk)
            save_flag = False
            for text_attr in text_attrs:
                txt = getattr(model, text_attr)

                if html_based:
                    new_txt = process_html(txt)
                else:
                    new_txt = process_post(txt)
                    warn_if_image_refs(txt, model_name, model.pk)

                if txt != new_txt:
                    logger.info("Content changed on %s #%d (pk = %d)",
                                model_name, n + i, model.pk)
                    logger.debug(u"original: %s", txt)
                    logger.debug(u"changed: %s", new_txt)
                    setattr(model, text_attr, new_txt)
                    save_flag = True
                elif not html_based and hasattr(model, 'html') and html_check(model.html):
                    # Check for content generated with older smiley code that used
                    # absolute URLs for the smiley images. If True, then just save
                    # the model again to force updated HTML to be created.
                    logger.info("Older Smiley HTML detected, forcing a save")
                    save_flag = True

            if save_flag:
                model.save(**save_kwargs)
                count += 1

        time_finished = datetime.datetime.now()
        elapsed = time_finished - time_started
        logger.info("ssl_images exiting; number of objects: %d; elapsed: %s",
                    count, elapsed)

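        # url_cache maps each original http URL to its new https location;
        # a value of None means the image could not be converted.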
        http_images = len(url_cache)
        https_images = sum(1 for v in url_cache.itervalues() if v)
        bad_images = http_images - https_images
        if http_images > 0:
            pct_saved = float(https_images) / http_images * 100.0
        else:
            pct_saved = 0.0

        logger.info("Summary: http: %d; https: %d; lost: %d; saved: %3.1f %%",
                    http_images, https_images, bad_images, pct_saved)

        save_cache()
        logger.info("ssl_images done")


def load_cache():
    """Load cache from previous runs."""
    logger.info("Loading cached information")
    try:
        with open(CACHE_FILENAME, 'r') as fp:
            d = json.load(fp)
    except IOError as ex:
        logger.error("Cache file (%s) IOError: %s", CACHE_FILENAME, ex)
        return
    except ValueError:
        logger.error("Mangled cache file: %s", CACHE_FILENAME)
        return

    global bad_hosts, url_cache
    try:
        bad_hosts = set(d['bad_hosts'])
        url_cache = d['url_cache']
    except KeyError:
        logger.error("Malformed cache file: %s", CACHE_FILENAME)


def save_cache():
    """Save our cache to a file for future runs."""
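    # The cache is a JSON object with two keys; the values shown here are
    # only illustrative:
    #   {"bad_hosts": ["host.example.com"],
    #    "url_cache": {"http://host.example.com/a.jpg": "https://.../key.jpg"}}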
    logger.info("Saving cached information")
    d = {'bad_hosts': list(bad_hosts), 'url_cache': url_cache}
    with open(CACHE_FILENAME, 'w') as fp:
        json.dump(d, fp, indent=4)