Small performance tweaks to mirror status JSON encoding

Do a few things to speed up the encoding of the JSON, including better
usage of list comprehensions, less dynamic setattr() usage, and removal
of the queryset specialization since we can easily do it outside of the
encoder.

Signed-off-by: Dan McGee <dan@archlinux.org>
This commit is contained in:
Dan McGee 2014-10-21 09:10:28 -05:00
parent f9f8683799
commit ee6bf27820
2 changed files with 16 additions and 21 deletions

View File

@@ -84,19 +84,16 @@ def status_data(cutoff=DEFAULT_CUTOFF, mirror_id=None):
def annotate_url(url, url_data):
'''Given a MirrorURL object, add a few more attributes to it regarding
status, including completion_pct, delay, and score.'''
known_attrs = (
('success_count', 0),
('check_count', 0),
('completion_pct', None),
('duration_avg', None),
('duration_stddev', None),
('last_check', None),
('last_sync', None),
('delay', None),
('score', None),
)
for k, v in known_attrs:
setattr(url, k, v)
# set up some sane default values in case we are missing anything
url.success_count = 0
url.check_count = 0
url.completion_pct = None
url.duration_avg = None
url.duration_stddev = None
url.last_check = None
url.last_sync = None
url.delay = None
url.score = None
for k, v in url_data.items():
if k not in ('url_id', 'mirror_id'):
setattr(url, k, v)
@@ -107,7 +104,7 @@ def annotate_url(url, url_data):
if url.delay is not None:
hours = url.delay.days * 24.0 + url.delay.seconds / 3600.0
if url.completion_pct > 0:
if url.completion_pct > 0.0:
divisor = url.completion_pct
else:
# arbitrary small value
@@ -115,6 +112,8 @@ def annotate_url(url, url_data):
stddev = url.duration_stddev or 0.0
url.score = (hours + url.duration_avg + stddev) / divisor
return url
def get_mirror_statuses(cutoff=DEFAULT_CUTOFF, mirror_id=None, show_all=False):
cutoff_time = now() - cutoff
@@ -133,8 +132,7 @@ def get_mirror_statuses(cutoff=DEFAULT_CUTOFF, mirror_id=None, show_all=False):
id__in=valid_urls).order_by('mirror__id', 'url')
if urls:
for url in urls:
annotate_url(url, url_data.get(url.id, {}))
urls = [annotate_url(url, url_data.get(url.id, {})) for url in urls]
last_check = max([u.last_check for u in urls if u.last_check])
num_checks = max([u.check_count for u in urls])
check_info = MirrorLog.objects.filter(check_time__gte=cutoff_time)

View File

@@ -275,9 +275,6 @@ def default(self, obj):
if isinstance(obj, timedelta):
# always returned as integer seconds
return obj.days * 24 * 3600 + obj.seconds
if hasattr(obj, '__iter__'):
# mainly for queryset serialization
return list(obj)
if isinstance(obj, MirrorUrl):
data = {attr: getattr(obj, attr) for attr in self.url_attributes}
country = obj.country
@@ -298,8 +295,8 @@ def default(self, obj):
if isinstance(obj, MirrorUrl):
data = super(ExtendedMirrorStatusJSONEncoder, self).default(obj)
cutoff = now() - DEFAULT_CUTOFF
data['logs'] = obj.logs.filter(
check_time__gte=cutoff).order_by('check_time')
data['logs'] = list(obj.logs.filter(
check_time__gte=cutoff).order_by('check_time'))
return data
if isinstance(obj, MirrorLog):
return {attr: getattr(obj, attr) for attr in self.log_attributes}