author | Luke Shumaker <lukeshu@sbcglobal.net> | 2015-04-13 21:10:05 -0400
---|---|---
committer | Luke Shumaker <lukeshu@sbcglobal.net> | 2015-04-13 21:10:05 -0400
commit | 67b87ed4c0abd593e2833b3aa6b969c2e2451b60 (patch) |
tree | e6d3ad84990f78b23cd291543cd601951117a822 /mirrors |
parent | c910b9220c45eb9d493d260f325efda2141205a4 (diff) |
parent | 69d771978bcf7d70d106c3e704fde203451fd48e (diff) |
Merge tag 'release_2014-10-21' into archweb-generic
More tweaks and changes
Diffstat (limited to 'mirrors')
-rw-r--r-- | mirrors/utils.py | 69
-rw-r--r-- | mirrors/views.py | 19
2 files changed, 39 insertions, 49 deletions
diff --git a/mirrors/utils.py b/mirrors/utils.py
index fe18cd6a..51daf50e 100644
--- a/mirrors/utils.py
+++ b/mirrors/utils.py
@@ -21,9 +21,7 @@ def dictfetchall(cursor):
         for row in cursor.fetchall()
     ]
 
-@cache_function(178)
-def status_data(cutoff=DEFAULT_CUTOFF, mirror_id=None):
-    cutoff_time = now() - cutoff
+def status_data(cutoff_time, mirror_id=None):
     if mirror_id is not None:
         params = [cutoff_time, mirror_id]
         mirror_where = 'AND u.mirror_id = %s'
@@ -84,19 +82,16 @@ GROUP BY l.url_id, u.mirror_id
 def annotate_url(url, url_data):
     '''Given a MirrorURL object, add a few more attributes to it regarding
     status, including completion_pct, delay, and score.'''
-    known_attrs = (
-        ('success_count', 0),
-        ('check_count', 0),
-        ('completion_pct', None),
-        ('duration_avg', None),
-        ('duration_stddev', None),
-        ('last_check', None),
-        ('last_sync', None),
-        ('delay', None),
-        ('score', None),
-    )
-    for k, v in known_attrs:
-        setattr(url, k, v)
+    # set up some sane default values in case we are missing anything
+    url.success_count = 0
+    url.check_count = 0
+    url.completion_pct = None
+    url.duration_avg = None
+    url.duration_stddev = None
+    url.last_check = None
+    url.last_sync = None
+    url.delay = None
+    url.score = None
     for k, v in url_data.items():
         if k not in ('url_id', 'mirror_id'):
             setattr(url, k, v)
@@ -107,7 +102,7 @@ def annotate_url(url, url_data):
     if url.delay is not None:
         hours = url.delay.days * 24.0 + url.delay.seconds / 3600.0
 
-        if url.completion_pct > 0:
+        if url.completion_pct > 0.0:
             divisor = url.completion_pct
         else:
             # arbitrary small value
@@ -115,28 +110,26 @@ def annotate_url(url, url_data):
         stddev = url.duration_stddev or 0.0
         url.score = (hours + url.duration_avg + stddev) / divisor
 
+    return url
+
 
+@cache_function(178)
 def get_mirror_statuses(cutoff=DEFAULT_CUTOFF, mirror_id=None,
         show_all=False):
     cutoff_time = now() - cutoff
-    valid_urls = MirrorUrl.objects.filter(
-        logs__check_time__gte=cutoff_time).distinct()
-
+    urls = MirrorUrl.objects.select_related(
+        'mirror', 'protocol').order_by('mirror__id', 'url')
     if mirror_id:
-        valid_urls = valid_urls.filter(mirror_id=mirror_id)
+        urls = urls.filter(mirror_id=mirror_id)
     if not show_all:
-        valid_urls = valid_urls.filter(active=True, mirror__active=True,
+        urls = urls.filter(active=True, mirror__active=True,
                 mirror__public=True)
 
-    url_data = status_data(cutoff, mirror_id)
-    urls = MirrorUrl.objects.select_related('mirror', 'protocol').filter(
-        id__in=valid_urls).order_by('mirror__id', 'url')
-    if urls:
-        for url in urls:
-            annotate_url(url, url_data.get(url.id, {}))
-        last_check = max([u.last_check for u in urls if u.last_check])
-        num_checks = max([u.check_count for u in urls])
+    url_data = status_data(cutoff_time, mirror_id)
+    urls = [annotate_url(url, url_data.get(url.id, {})) for url in urls]
+    last_check = max([u.last_check for u in urls if u.last_check] or [None])
+    num_checks = max(u.check_count for u in urls)
 
     check_info = MirrorLog.objects.filter(check_time__gte=cutoff_time)
     if mirror_id:
         check_info = check_info.filter(url__mirror_id=mirror_id)
@@ -195,23 +188,21 @@ def get_mirror_url_for_download(cutoff=DEFAULT_CUTOFF):
     if status_data['check_time__max'] is not None:
         min_check_time = status_data['check_time__max'] - timedelta(minutes=5)
         min_sync_time = status_data['last_sync__max'] - timedelta(minutes=20)
-        best_logs = MirrorLog.objects.filter(is_success=True,
+        best_logs = MirrorLog.objects.select_related('url').filter(
+                is_success=True,
                 check_time__gte=min_check_time, last_sync__gte=min_sync_time,
                 url__active=True,
                 url__mirror__public=True, url__mirror__active=True,
                 url__protocol__default=True).order_by(
                 'duration')[:1]
         if best_logs:
-            return MirrorUrl.objects.get(id=best_logs[0].url_id)
+            return best_logs[0].url
 
     mirror_urls = MirrorUrl.objects.filter(active=True,
-            mirror__public=True, mirror__active=True, protocol__default=True)
-    # look first for a country-agnostic URL, then fall back to any HTTP URL
-    filtered_urls = mirror_urls.filter(country='')[:1]
-    if not filtered_urls:
-        filtered_urls = mirror_urls[:1]
-    if not filtered_urls:
+            mirror__public=True, mirror__active=True,
+            protocol__default=True)[:1]
+    if not mirror_urls:
         return None
-    return filtered_urls[0]
+    return mirror_urls[0]
 
 # vim: set ts=4 sw=4 et:
diff --git a/mirrors/views.py b/mirrors/views.py
index 55c40c4d..c2736da8 100644
--- a/mirrors/views.py
+++ b/mirrors/views.py
@@ -11,6 +11,7 @@ from django.db.models import Q
 from django.http import Http404, HttpResponse
 from django.shortcuts import get_object_or_404, redirect, render
 from django.utils.timezone import now
+from django.views.decorators.cache import cache_page
 from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import condition
 from django_countries.data import COUNTRIES
@@ -245,7 +246,10 @@ def status(request, tier=None):
         if tier is not None and url.mirror.tier != tier:
             continue
         # split them into good and bad lists based on delay
-        if not url.delay or url.delay > bad_timedelta:
+        if url.completion_pct is None:
+            # skip URLs that have never been checked
+            continue
+        elif not url.delay or url.delay > bad_timedelta:
             bad_urls.append(url)
         else:
             good_urls.append(url)
@@ -275,9 +279,6 @@ class MirrorStatusJSONEncoder(DjangoJSONEncoder):
         if isinstance(obj, timedelta):
             # always returned as integer seconds
             return obj.days * 24 * 3600 + obj.seconds
-        if hasattr(obj, '__iter__'):
-            # mainly for queryset serialization
-            return list(obj)
         if isinstance(obj, MirrorUrl):
             data = {attr: getattr(obj, attr) for attr in self.url_attributes}
             country = obj.country
@@ -298,14 +299,15 @@ class ExtendedMirrorStatusJSONEncoder(MirrorStatusJSONEncoder):
         if isinstance(obj, MirrorUrl):
             data = super(ExtendedMirrorStatusJSONEncoder, self).default(obj)
             cutoff = now() - DEFAULT_CUTOFF
-            data['logs'] = obj.logs.filter(
-                check_time__gte=cutoff).order_by('check_time')
+            data['logs'] = list(obj.logs.filter(
+                check_time__gte=cutoff).order_by('check_time'))
             return data
         if isinstance(obj, MirrorLog):
             return {attr: getattr(obj, attr) for attr in self.log_attributes}
         return super(ExtendedMirrorStatusJSONEncoder, self).default(obj)
 
 
+@cache_page(67)
 @condition(last_modified_func=status_last_modified)
 def status_json(request, tier=None):
     if tier is not None:
@@ -326,9 +328,6 @@ class LocationJSONEncoder(DjangoJSONEncoder):
     '''Base JSONEncoder extended to handle CheckLocation objects.'''
 
     def default(self, obj):
-        if hasattr(obj, '__iter__'):
-            # mainly for queryset serialization
-            return list(obj)
         if isinstance(obj, CheckLocation):
             return {
                 'id': obj.pk,
@@ -344,7 +343,7 @@ class LocationJSONEncoder(DjangoJSONEncoder):
 def locations_json(request):
     data = {}
     data['version'] = 1
-    data['locations'] = CheckLocation.objects.all().order_by('pk')
+    data['locations'] = list(CheckLocation.objects.all().order_by('pk'))
     to_json = json.dumps(data, ensure_ascii=False, cls=LocationJSONEncoder)
     response = HttpResponse(to_json, content_type='application/json')
     return response
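The caching in this merge moves one level up: `@cache_function(178)` now wraps the whole of `get_mirror_statuses()` instead of the raw `status_data()` query, and `status_json` additionally gains Django's stock `@cache_page(67)` per-view cache. The `cache_function` helper itself is defined outside this diff; the sketch below is only a rough illustration of that kind of decorator (a time-limited, argument-keyed result cache), and the key scheme shown is an assumption, not the actual archweb implementation.

```python
import hashlib
from functools import wraps

from django.core.cache import cache


def cache_function(length):
    """Sketch of a result-caching decorator: keep the wrapped function's
    return value in Django's cache for `length` seconds, keyed on the
    function name and its arguments. Illustrative only; the helper used
    by archweb lives elsewhere in the tree and may differ."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Hash the arguments so the cache key stays short and free of
            # characters some cache backends reject (assumed key layout).
            raw = '%s:%r:%r' % (func.__name__, args, sorted(kwargs.items()))
            key = 'cache_function:' + hashlib.md5(raw.encode('utf-8')).hexdigest()
            result = cache.get(key)
            if result is None:
                # Cache miss (or a cached None): recompute and store.
                result = func(*args, **kwargs)
                cache.set(key, result, length)
            return result
        return wrapper
    return decorator
```

Read that way, the `@cache_function(178)` line in the diff memoizes the full mirror status computation for roughly three minutes, while `@cache_page(67)` caches the rendered JSON response for about a minute on top of the existing `@condition` last-modified handling.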