From badc535aeb1d310a9b8aa59aade07045e6eae653 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 18 Apr 2012 15:05:43 -0500 Subject: Ensure order_by default value is cleared when using distinct() Otherwise the queryset returns nonsensical results. I find the design of this less than obvious but so be it; we can ensure the results work regardless of a default ordering on the model. Signed-off-by: Dan McGee --- packages/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index a3c13b17..8d00bd68 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -218,7 +218,8 @@ def attach_maintainers(packages): packages = list(packages) pkgbases = set(p.pkgbase for p in packages) rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, - pkgbase__in=pkgbases).values_list('pkgbase', 'user_id').distinct() + pkgbase__in=pkgbases).values_list( + 'pkgbase', 'user_id').order_by().distinct() # get all the user objects we will need user_ids = set(rel[1] for rel in rels) -- cgit v1.2.3-54-g00ecf From 72a92102df4999dbcc370064707c9026d51c4fe7 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 18 May 2012 21:29:03 -0500 Subject: Switch to usage of new Depend object Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 18 +++++++++------- devel/views.py | 6 +++--- main/models.py | 7 +++--- packages/models.py | 34 ++++++++++++++++++++++++++---- packages/utils.py | 6 +++--- templates/packages/details_depend.html | 6 +++--- templates/packages/details_requiredby.html | 2 +- 7 files changed, 54 insertions(+), 25 deletions(-) (limited to 'packages/utils.py') diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index fd8e3979..47294d9a 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -29,9 +29,9 @@ from django.db.utils import IntegrityError from devel.utils import UserFinder -from main.models import Arch, Package, PackageDepend, PackageFile, Repo +from main.models import Arch, Package, PackageFile, Repo from main.utils import utc_now -from packages.models import Conflict, Provision, Replacement +from packages.models import Depend, Conflict, Provision, Replacement logging.basicConfig( @@ -141,19 +141,21 @@ def full_version(self): return u'%s-%s' % (self.ver, self.rel) -DEPEND_RE = re.compile(r"^(.+?)((>=|<=|=|>|<)(.*))?$") +DEPEND_RE = re.compile(r"^(.+?)((>=|<=|=|>|<)(.+))?$") def create_depend(package, dep_str, optional=False): - depend = PackageDepend(pkg=package, optional=optional) + depend = Depend(pkg=package, optional=optional) # lop off any description first parts = dep_str.split(':', 1) if len(parts) > 1: depend.description = parts[1].strip() match = DEPEND_RE.match(parts[0].strip()) if match: - depend.depname = match.group(1) - if match.group(2): - depend.depvcmp = match.group(2) + depend.name = match.group(1) + if match.group(3): + depend.comparison = match.group(3) + if match.group(4): + depend.version = match.group(4) else: logger.warning('Package %s had unparsable depend string %s', package.pkgname, dep_str) @@ -232,7 +234,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): dbpkg.depends.all().delete() deps = [create_depend(dbpkg, y) for y in repopkg.depends] deps += [create_depend(dbpkg, y, True) for y in repopkg.optdepends] - PackageDepend.objects.bulk_create(deps) + Depend.objects.bulk_create(deps) dbpkg.conflicts.all().delete() conflicts = [create_related(Conflict, dbpkg, y)
for y in repopkg.conflicts] diff --git a/devel/views.py b/devel/views.py index 0f1c8d15..16b6acc6 100644 --- a/devel/views.py +++ b/devel/views.py @@ -26,11 +26,11 @@ from django.utils.http import http_date from .models import UserProfile -from main.models import Package, PackageDepend, PackageFile, TodolistPkg +from main.models import Package, PackageFile, TodolistPkg from main.models import Arch, Repo from main.utils import utc_now from news.models import News -from packages.models import PackageRelation, Signoff +from packages.models import PackageRelation, Signoff, Depend from packages.utils import get_signoff_groups from todolists.utils import get_annotated_todolists from .utils import get_annotated_maintainers, UserFinder @@ -267,7 +267,7 @@ def report(request, report_name, username=None): elif report_name == 'unneeded-orphans': title = 'Orphan packages required by no other packages' owned = PackageRelation.objects.all().values('pkgbase') - required = PackageDepend.objects.all().values('depname') + required = Depend.objects.all().values('name') # The two separate calls to exclude is required to do the right thing packages = packages.exclude(pkgbase__in=owned).exclude( pkgname__in=required) diff --git a/main/models.py b/main/models.py index 4b445dd0..f17d4a4d 100644 --- a/main/models.py +++ b/main/models.py @@ -180,11 +180,12 @@ def get_requiredby(self): list slim by including the corresponding package in the same testing category as this package if that check makes sense. """ + from packages.models import Depend provides = set(self.provides.values_list('name', flat=True)) provides.add(self.pkgname) - requiredby = PackageDepend.objects.select_related('pkg', + requiredby = Depend.objects.select_related('pkg', 'pkg__arch', 'pkg__repo').filter( - depname__in=provides).order_by( + name__in=provides).order_by( 'pkg__pkgname', 'pkg__arch__name', 'pkg__repo__name') if not self.arch.agnostic: # make sure we match architectures if possible @@ -232,7 +233,7 @@ def get_depends(self): deps = [] arches = None # TODO: we can use list comprehension and an 'in' query to make this more effective - for dep in self.depends.order_by('optional', 'depname'): + for dep in self.depends.order_by('optional', 'name'): pkg = dep.get_best_satisfier() providers = None if not pkg: diff --git a/packages/models.py b/packages/models.py index c7b1cab4..cb65f1f1 100644 --- a/packages/models.py +++ b/packages/models.py @@ -228,10 +228,6 @@ def get_best_satisfier(self): '''Find a satisfier for this related package that best matches the given criteria. It will not search provisions, but will find packages named and matching repo characteristics if possible.''' - # NOTE: this is cribbed directly from the PackageDepend method of the - # same name. Really, all of these things could use the same method if - # the PackageDepend class was moved here and field names were changed - # to match the layout we use here. pkgs = Package.objects.normal().filter(pkgname=self.name) if not self.pkg.arch.agnostic: # make sure we match architectures if possible @@ -258,6 +254,36 @@ def get_best_satisfier(self): return pkg + def get_providers(self): + '''Return providers of this related package. 
Does *not* include exact + matches as it checks the Provision names only, use get_best_satisfier() + instead for exact matches.''' + pkgs = Package.objects.normal().filter( + provides__name=self.name).order_by().distinct() + if not self.pkg.arch.agnostic: + # make sure we match architectures if possible + arches = self.pkg.applicable_arches() + pkgs = pkgs.filter(arch__in=arches) + + # Logic here is to filter out packages that are in multiple repos if + # they are not requested. For example, if testing is False, only show a + # testing package if it doesn't exist in a non-testing repo. + filtered = {} + for package in pkgs: + if package.pkgname not in filtered or \ + package.repo.staging == self.pkg.repo.staging: + filtered[package.pkgname] = package + pkgs = filtered.values() + + filtered = {} + for package in pkgs: + if package.pkgname not in filtered or \ + package.repo.testing == self.pkg.repo.testing: + filtered[package.pkgname] = package + pkgs = filtered.values() + + return pkgs + def __unicode__(self): if self.version: return u'%s%s%s' % (self.name, self.comparison, self.version) diff --git a/packages/utils.py b/packages/utils.py index 8d00bd68..82313472 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -7,10 +7,10 @@ from django.db.models import Count, Max, F from django.contrib.auth.models import User -from main.models import Package, PackageDepend, PackageFile, Arch, Repo +from main.models import Package, PackageFile, Arch, Repo from main.utils import cache_function, groupby_preserve_order, PackageStandin from .models import (PackageGroup, PackageRelation, - License, Conflict, Provision, Replacement, + License, Depend, Conflict, Provision, Replacement, SignoffSpecification, Signoff, DEFAULT_SIGNOFF_SPEC) @cache_function(127) @@ -451,7 +451,7 @@ def default(self, obj): return obj.name.lower() if isinstance(obj, (PackageGroup, License)): return obj.name - if isinstance(obj, (Conflict, Provision, Replacement, PackageDepend)): + if isinstance(obj, (Depend, Conflict, Provision, Replacement)): return unicode(obj) elif isinstance(obj, User): return obj.username diff --git a/templates/packages/details_depend.html b/templates/packages/details_depend.html index 8b6e85c9..0cf2c36a 100644 --- a/templates/packages/details_depend.html +++ b/templates/packages/details_depend.html @@ -2,12 +2,12 @@
  • {% ifequal depend.pkg None %} {% if depend.providers %} -{{ depend.dep.depname }} ({% multi_pkg_details depend.providers %}) +{{ depend.dep.name }}{{ depend.dep.comparison|default:"" }}{{ depend.dep.version|default:"" }} ({% multi_pkg_details depend.providers %}) {% else %} -{{ depend.dep.depname }} (virtual) +{{ depend.dep.name }}{{ depend.dep.comparison|default:"" }}{{ depend.dep.version|default:"" }} (virtual) {% endif %} {% else %} -{% pkg_details_link depend.pkg %}{{ depend.dep.depvcmp|default:"" }} +{% pkg_details_link depend.pkg %}{{ depend.dep.comparison|default:"" }}{{ depend.dep.version|default:"" }} {% if depend.pkg.repo.testing %} (testing){% endif %} {% if depend.pkg.repo.staging %} (staging){% endif %} {% endifequal %} diff --git a/templates/packages/details_requiredby.html b/templates/packages/details_requiredby.html index c7697289..ecc92b29 100644 --- a/templates/packages/details_requiredby.html +++ b/templates/packages/details_requiredby.html @@ -1,6 +1,6 @@ {% load package_extras %}
  • {% pkg_details_link req.pkg %} -{% if req.depname != pkg.pkgname %}(requires {{ req.depname }}){% endif %} +{% if req.name != pkg.pkgname %}(requires {{ req.name }}){% endif %} {% if req.pkg.repo.testing %}(testing){% endif %} {% if req.pkg.repo.staging %}(staging){% endif %} {% if req.optional %}(optional){% endif %} -- cgit v1.2.3-54-g00ecf From b95b0cd4197d70831754a7e81b40388c37ab1a3d Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 8 Jul 2012 20:51:23 -0500 Subject: Use a set instead of list when gathering package IDs to fetch If we have duplicates in this list, it makes no sense to include them in the list we send to the database. Signed-off-by: Dan McGee --- packages/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index 82313472..b86b6eba 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -123,7 +123,7 @@ def get_differences_info(arch_a, arch_b): cursor.execute(sql, [arch_a.id, arch_b.id]) results = cursor.fetchall() # column A will always have a value, column B might be NULL - to_fetch = [row[0] for row in results] + to_fetch = set(row[0] for row in results) # fetch all of the necessary packages pkgs = Package.objects.normal().in_bulk(to_fetch) # now build a list of tuples containing differences -- cgit v1.2.3-54-g00ecf From 3c906888e2ba9e55cef00dfc61667fb383c9754d Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 8 Jul 2012 20:44:07 -0500 Subject: Get multilib package differences query working on sqlite3 Thank you database engines for all implementing such simple operations as substring() and length() in different ways. Signed-off-by: Dan McGee --- packages/utils.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index b86b6eba..6d54d71a 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -8,7 +8,8 @@ from django.contrib.auth.models import User from main.models import Package, PackageFile, Arch, Repo -from main.utils import cache_function, groupby_preserve_order, PackageStandin +from main.utils import (cache_function, database_vendor, + groupby_preserve_order, PackageStandin) from .models import (PackageGroup, PackageRelation, License, Depend, Conflict, Provision, Replacement, SignoffSpecification, Signoff, DEFAULT_SIGNOFF_SPEC) @@ -150,12 +151,18 @@ def get_differences_info(arch_a, arch_b): def multilib_differences(): # Query for checking multilib out of date-ness - sql = """ -SELECT ml.id, reg.id - FROM packages ml - JOIN packages reg - ON ( - reg.pkgname = ( + if database_vendor(Package) == 'sqlite': + pkgname_sql = """ + CASE WHEN ml.pkgname LIKE %s + THEN SUBSTR(ml.pkgname, 7) + WHEN ml.pkgname LIKE %s + THEN SUBSTR(ml.pkgname, 1, LENGTH(ml.pkgname) - 9) + ELSE + ml.pkgname + END + """ + else: + pkgname_sql = """ CASE WHEN ml.pkgname LIKE %s THEN SUBSTRING(ml.pkgname, 7) WHEN ml.pkgname LIKE %s @@ -163,7 +170,13 @@ def multilib_differences(): ELSE ml.pkgname END - ) + """ + sql = """ +SELECT ml.id, reg.id + FROM packages ml + JOIN packages reg + ON ( + reg.pkgname = (""" + pkgname_sql + """) AND reg.pkgver != ml.pkgver ) JOIN repos r ON reg.repo_id = r.id @@ -172,7 +185,7 @@ def multilib_differences(): AND r.staging = %s AND reg.arch_id = %s ORDER BY ml.last_update -""" + """ multilib = Repo.objects.get(name__iexact='multilib') i686 = Arch.objects.get(name='i686') params = ['lib32-%', '%-multilib', multilib.id, False, False, i686.id] -- 
cgit v1.2.3-54-g00ecf From 241ff8fbd79f9f17cd326a34eb39096851f630ba Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 8 Aug 2012 22:07:06 -0500 Subject: Extract parse_version function from reporead logic Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 9 ++------- packages/utils.py | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 7 deletions(-) (limited to 'packages/utils.py') diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 8b55b09a..af0a2dc0 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -34,6 +34,7 @@ from main.models import Arch, Package, PackageFile, Repo from main.utils import database_vendor from packages.models import Depend, Conflict, Provision, Replacement, Update +from packages.utils import parse_version logging.basicConfig( @@ -84,8 +85,6 @@ class RepoPackage(object): 'conflicts', 'provides', 'replaces', 'groups', 'license', 'files' ) - version_re = re.compile(r'^((\d+):)?(.+)-([^-]+)$') - def __init__(self, repo): self.repo = repo self.ver = None @@ -112,11 +111,7 @@ def populate(self, values): # do NOT prune these values at all setattr(self, k, v[0]) elif k == 'version': - match = self.version_re.match(v[0]) - self.ver = match.group(3) - self.rel = match.group(4) - if match.group(2): - self.epoch = int(match.group(2)) + self.ver, self.rel, self.epoch = parse_version(v[0]) elif k == 'builddate': try: builddate = datetime.utcfromtimestamp(int(v[0])) diff --git a/packages/utils.py b/packages/utils.py index 6d54d71a..d4b4e611 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -1,6 +1,7 @@ from collections import defaultdict from itertools import chain from operator import itemgetter +import re from django.core.serializers.json import DjangoJSONEncoder from django.db import connection @@ -14,6 +15,23 @@ License, Depend, Conflict, Provision, Replacement, SignoffSpecification, Signoff, DEFAULT_SIGNOFF_SPEC) + +VERSION_RE = re.compile(r'^((\d+):)?(.+)-([^-]+)$') + + +def parse_version(version): + match = VERSION_RE.match(version) + if not match: + return None, None, 0 + ver = match.group(3) + rel = match.group(4) + if match.group(2): + epoch = int(match.group(2)) + else: + epoch = 0 + return ver, rel, epoch + + @cache_function(127) def get_group_info(include_arches=None): raw_groups = PackageGroup.objects.values_list( -- cgit v1.2.3-54-g00ecf From ad05f3eb2c8511c63dbdc9378bf3561ab949e940 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 8 Aug 2012 22:07:38 -0500 Subject: PEP8 cleanups in package utils Signed-off-by: Dan McGee --- packages/utils.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index d4b4e611..d95c015f 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -36,7 +36,7 @@ def parse_version(version): def get_group_info(include_arches=None): raw_groups = PackageGroup.objects.values_list( 'name', 'pkg__arch__name').order_by('name').annotate( - cnt=Count('pkg'), last_update=Max('pkg__last_update')) + cnt=Count('pkg'), last_update=Max('pkg__last_update')) # now for post_processing. we need to seperate things out and add # the count in for 'any' to all of the other architectures. 
group_mapping = {} @@ -71,6 +71,7 @@ def get_group_info(include_arches=None): groups.extend(val.itervalues()) return sorted(groups, key=itemgetter('name', 'arch')) + def get_split_packages_info(): '''Return info on split packages that do not have an actual package name matching the split pkgbase.''' @@ -276,6 +277,7 @@ def approved_by_signoffs(signoffs, spec): return good_signoffs >= spec.required return False + class PackageSignoffGroup(object): '''Encompasses all packages in testing with the same pkgbase.''' def __init__(self, packages): @@ -375,6 +377,7 @@ def __unicode__(self): AND p.repo_id IN (%s) """ + def get_current_signoffs(repos): '''Returns a mapping of pkgbase -> signoff objects for the given repos.''' cursor = connection.cursor() @@ -389,6 +392,7 @@ def get_current_signoffs(repos): signoffs = Signoff.objects.select_related('user').in_bulk(to_fetch) return signoffs.values() + def get_current_specifications(repos): '''Returns a mapping of pkgbase -> signoff specification objects for the given repos.''' @@ -401,6 +405,7 @@ def get_current_specifications(repos): to_fetch = [row[0] for row in results] return SignoffSpecification.objects.in_bulk(to_fetch).values() + def get_target_repo_map(repos): sql = """ SELECT DISTINCT p1.pkgbase, r.name @@ -421,6 +426,7 @@ def get_target_repo_map(repos): cursor.execute(sql, params) return dict(cursor.fetchall()) + def get_signoff_groups(repos=None, user=None): if repos is None: repos = Repo.objects.filter(testing=True) @@ -458,12 +464,12 @@ def get_signoff_groups(repos=None, user=None): class PackageJSONEncoder(DjangoJSONEncoder): - pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', + pkg_attributes = ['pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size', 'installed_size', 'build_date', 'last_update', 'flag_date', - 'maintainers', 'packager' ] - pkg_list_attributes = [ 'groups', 'licenses', 'conflicts', - 'provides', 'replaces', 'depends' ] + 'maintainers', 'packager'] + pkg_list_attributes = ['groups', 'licenses', 'conflicts', + 'provides', 'replaces', 'depends'] def default(self, obj): if hasattr(obj, '__iter__'): @@ -488,5 +494,4 @@ def default(self, obj): return obj.username return super(PackageJSONEncoder, self).default(obj) - # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From a071d800c6a26d3efcdc0d32fe1adb1cde7e6f31 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 15 Aug 2012 08:22:01 -0500 Subject: Fix signoffs SQL query Although the old query returned the same results, the repos IN clause should really be a part of the WHERE, not the JOIN condition. Signed-off-by: Dan McGee --- packages/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index d95c015f..ee1b56b3 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -363,6 +363,7 @@ def __unicode__(self): return u'%s-%s (%s): %d' % ( self.pkgbase, self.version, self.arch, len(self.signoffs)) + _SQL_SPEC_OR_SIGNOFF = """ SELECT DISTINCT s.id FROM %s s @@ -374,7 +375,7 @@ def __unicode__(self): AND s.arch_id = p.arch_id AND s.repo_id = p.repo_id ) - AND p.repo_id IN (%s) + WHERE p.repo_id IN (%s) """ -- cgit v1.2.3-54-g00ecf From a71aa2e354599950f4bd464f0f19215f1c581141 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 12 Oct 2012 11:34:49 -0500 Subject: Make wrong permissions query more efficient This removes the subplan and per-row query in favor of a LEFT JOIN where we look for non-matching rows. 
Tested in sqlite3 and PostgreSQL. Signed-off-by: Dan McGee --- packages/utils.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index ee1b56b3..c29e2297 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -228,12 +228,13 @@ def get_wrong_permissions(): FROM packages p JOIN packages_packagerelation pr ON p.pkgbase = pr.pkgbase WHERE pr.type = %s - ) pkgs - WHERE pkgs.repo_id NOT IN ( - SELECT repo_id FROM user_profiles_allowed_repos ar + ) mp + LEFT JOIN ( + SELECT user_id, repo_id FROM user_profiles_allowed_repos ar INNER JOIN user_profiles up ON ar.userprofile_id = up.id - WHERE up.user_id = pkgs.user_id - ) + ) ur + ON mp.user_id = ur.user_id AND mp.repo_id = ur.repo_id + WHERE ur.user_id IS NULL; """ cursor = connection.cursor() cursor.execute(sql, [PackageRelation.MAINTAINER]) -- cgit v1.2.3-54-g00ecf From 0b3aa29cb63c6ca07f066a4a68fa3df9b92f6216 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 14 Oct 2012 15:42:15 -0500 Subject: Refactor signoff-grabbing queries Make them a bit more efficient by adding an explicit condition on both the packages and signoff table for the repo ID, and move the common code into a shared function both can use. Signed-off-by: Dan McGee --- packages/utils.py | 40 ++++++++++++++++++---------------------- 1 file changed, 18 insertions(+), 22 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index c29e2297..051fed8e 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -365,7 +365,8 @@ def __unicode__(self): self.pkgbase, self.version, self.arch, len(self.signoffs)) -_SQL_SPEC_OR_SIGNOFF = """ +def signoffs_id_query(model, repos): + sql = """ SELECT DISTINCT s.id FROM %s s JOIN packages p ON ( @@ -377,34 +378,29 @@ def __unicode__(self): AND s.repo_id = p.repo_id ) WHERE p.repo_id IN (%s) -""" - - -def get_current_signoffs(repos): - '''Returns a mapping of pkgbase -> signoff objects for the given repos.''' + AND s.repo_id IN (%s) + """ cursor = connection.cursor() # query pre-process- fill in table name and placeholders for IN - sql = _SQL_SPEC_OR_SIGNOFF % ('packages_signoff', - ','.join(['%s' for r in repos])) - cursor.execute(sql, [r.pk for r in repos]) + repo_sql = ','.join(['%s' for r in repos]) + sql = sql % (model._meta.db_table, repo_sql, repo_sql) + repo_ids = [r.pk for r in repos] + # repo_ids are needed twice, so double the array + cursor.execute(sql, repo_ids * 2) results = cursor.fetchall() - # fetch all of the returned signoffs by ID - to_fetch = [row[0] for row in results] - signoffs = Signoff.objects.select_related('user').in_bulk(to_fetch) - return signoffs.values() + return [row[0] for row in results] -def get_current_specifications(repos): - '''Returns a mapping of pkgbase -> signoff specification objects for the - given repos.''' - cursor = connection.cursor() - sql = _SQL_SPEC_OR_SIGNOFF % ('packages_signoffspecification', - ','.join(['%s' for r in repos])) - cursor.execute(sql, [r.pk for r in repos]) +def get_current_signoffs(repos): + '''Returns a list of signoff objects for the given repos.''' + to_fetch = signoffs_id_query(Signoff, repos) + return Signoff.objects.select_related('user').in_bulk(to_fetch).values() - results = cursor.fetchall() - to_fetch = [row[0] for row in results] + +def get_current_specifications(repos): + '''Returns a list of signoff specification objects for the given repos.''' + to_fetch = signoffs_id_query(SignoffSpecification, repos) return 
SignoffSpecification.objects.in_bulk(to_fetch).values() -- cgit v1.2.3-54-g00ecf From 6dd4d54bb0adbbb0f8c2b1beaa92b7a58971cf88 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 16 Nov 2012 16:20:11 -0600 Subject: Use Python 2.7 dictionary comprehension syntax Rather than the old idiom of dict((k, v) for <> in <>). Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 4 ++-- devel/management/commands/reporead_inotify.py | 2 +- mirrors/views.py | 11 ++++------- packages/templatetags/package_extras.py | 2 +- packages/utils.py | 5 ++--- packages/views/signoff.py | 8 +++----- public/views.py | 2 +- visualize/views.py | 4 ++-- 8 files changed, 16 insertions(+), 22 deletions(-) (limited to 'packages/utils.py') diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index a1e77b49..3d4e6375 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -377,7 +377,7 @@ def db_update(archname, reponame, pkgs, force=False): # This makes our inner loop where we find packages by name *way* more # efficient by not having to go to the database for each package to # SELECT them by name. - dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs) + dbdict = {dbpkg.pkgname: dbpkg for dbpkg in dbpkgs} dbset = set(dbdict.keys()) syncset = set([pkg.name for pkg in pkgs]) @@ -446,7 +446,7 @@ def filesonly_update(archname, reponame, pkgs, force=False): """ logger.info('Updating files for %s (%s)', reponame, archname) dbpkgs = update_common(archname, reponame, pkgs, False) - dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs) + dbdict = {dbpkg.pkgname: dbpkg for dbpkg in dbpkgs} dbset = set(dbdict.keys()) for pkg in (pkg for pkg in pkgs if pkg.name in dbset): diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py index c74762eb..16b3869c 100644 --- a/devel/management/commands/reporead_inotify.py +++ b/devel/management/commands/reporead_inotify.py @@ -69,7 +69,7 @@ def setup_notifier(self): finally builds and returns a notifier object.''' arches = Arch.objects.filter(agnostic=False) repos = Repo.objects.all() - arch_path_map = dict((arch, None) for arch in arches) + arch_path_map = {arch: None for arch in arches} all_paths = set() total_paths = 0 for arch in arches: diff --git a/mirrors/views.py b/mirrors/views.py index 2e1e83b6..d0ce0a97 100644 --- a/mirrors/views.py +++ b/mirrors/views.py @@ -94,7 +94,7 @@ def default_protocol_filter(original_urls): def status_filter(original_urls): status_info = get_mirror_statuses() - scores = dict((u.id, u.score) for u in status_info['urls']) + scores = {u.id: u.score for u in status_info['urls']} urls = [] for u in original_urls: u.score = scores.get(u.id, None) @@ -165,7 +165,7 @@ def mirrors(request): if not request.user.is_authenticated(): mirror_list = mirror_list.filter(public=True, active=True) protos = protos.filter(mirror__public=True, mirror__active=True) - protos = dict((k, list(v)) for k, v in groupby(protos, key=itemgetter(0))) + protos = {k: list(v) for k, v in groupby(protos, key=itemgetter(0))} for mirror in mirror_list: items = protos.get(mirror.id, []) mirror.protocols = [item[1] for item in items] @@ -253,8 +253,7 @@ def default(self, obj): # mainly for queryset serialization return list(obj) if isinstance(obj, MirrorUrl): - data = dict((attr, getattr(obj, attr)) - for attr in self.url_attributes) + data = {attr: getattr(obj, attr) for attr in self.url_attributes} # get any override on the country attribute first 
country = obj.real_country data['country'] = unicode(country.name) @@ -277,9 +276,7 @@ def default(self, obj): check_time__gte=cutoff).order_by('check_time') return data if isinstance(obj, MirrorLog): - data = dict((attr, getattr(obj, attr)) - for attr in self.log_attributes) - return data + return {attr: getattr(obj, attr) for attr in self.log_attributes} return super(ExtendedMirrorStatusJSONEncoder, self).default(obj) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 994265d8..f3613e69 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -13,7 +13,7 @@ def link_encode(url, query): # massage the data into all utf-8 encoded strings first, so urlencode # doesn't barf at the data we pass it - query = dict((k, unicode(v).encode('utf-8')) for k, v in query.items()) + query = {k: unicode(v).encode('utf-8') for k, v in query.items()} data = urlencode(query).replace('&', '&') return "%s?%s" % (url, data) diff --git a/packages/utils.py b/packages/utils.py index 051fed8e..199e141d 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -385,7 +385,7 @@ def signoffs_id_query(model, repos): repo_sql = ','.join(['%s' for r in repos]) sql = sql % (model._meta.db_table, repo_sql, repo_sql) repo_ids = [r.pk for r in repos] - # repo_ids are needed twice, so double the array + # repo_ids are needed twice, so double the array cursor.execute(sql, repo_ids * 2) results = cursor.fetchall() @@ -474,8 +474,7 @@ def default(self, obj): # mainly for queryset serialization return list(obj) if isinstance(obj, Package): - data = dict((attr, getattr(obj, attr)) - for attr in self.pkg_attributes) + data = {attr: getattr(obj, attr) for attr in self.pkg_attributes} for attr in self.pkg_list_attributes: data[attr] = getattr(obj, attr).all() return data diff --git a/packages/views/signoff.py b/packages/views/signoff.py index 56eb060c..824a9922 100644 --- a/packages/views/signoff.py +++ b/packages/views/signoff.py @@ -155,8 +155,8 @@ class SignoffJSONEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, PackageSignoffGroup): - data = dict((attr, getattr(obj, attr)) - for attr in self.signoff_group_attrs) + data = {attr: getattr(obj, attr) + for attr in self.signoff_group_attrs} data['pkgnames'] = [p.pkgname for p in obj.packages] data['package_count'] = len(obj.packages) data['approved'] = obj.approved() @@ -164,9 +164,7 @@ def default(self, obj): for attr in self.signoff_spec_attrs) return data elif isinstance(obj, Signoff): - data = dict((attr, getattr(obj, attr)) - for attr in self.signoff_attrs) - return data + return {attr: getattr(obj, attr) for attr in self.signoff_attrs} elif isinstance(obj, Arch) or isinstance(obj, Repo): return unicode(obj) elif isinstance(obj, User): diff --git a/public/views.py b/public/views.py index 96120761..3e15f9df 100644 --- a/public/views.py +++ b/public/views.py @@ -118,7 +118,7 @@ def keys(request): sig_counts = PGPSignature.objects.filter(not_expired, valid=True, signee__in=user_key_ids).values_list('signer').annotate( Count('signer')) - sig_counts = dict((key_id[-16:], ct) for key_id, ct in sig_counts) + sig_counts = {key_id[-16:]: ct for key_id, ct in sig_counts} for key in master_keys: key.signature_count = sig_counts.get(key.pgp_key[-16:], 0) diff --git a/visualize/views.py b/visualize/views.py index 8d878937..48e8f86b 100644 --- a/visualize/views.py +++ b/visualize/views.py @@ -33,8 +33,8 @@ def build_map(name, arch, repo): # now transform these results into two 
mappings: one ordered (repo, arch), # and one ordered (arch, repo). - arch_groups = dict((a, build_map(a, a, None)) for a in arches) - repo_groups = dict((r, build_map(r, None, r)) for r in repos) + arch_groups = {a: build_map(a, a, None) for a in arches} + repo_groups = {r: build_map(r, None, r) for r in repos} for row in qs: arch = row['arch__name'] repo = row['repo__name'] -- cgit v1.2.3-54-g00ecf From 9e9157d0a8cbf9ea076231e438fb30f58bff8e29 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 16 Nov 2012 16:37:31 -0600 Subject: Use python set comprehension syntax supported in 2.7 Signed-off-by: Dan McGee --- devel/management/commands/import_signatures.py | 4 ++-- devel/management/commands/reporead.py | 2 +- devel/management/commands/reporead_inotify.py | 2 +- devel/views.py | 4 ++-- main/models.py | 2 +- packages/models.py | 2 +- packages/utils.py | 10 +++++----- packages/views/signoff.py | 2 +- todolists/views.py | 8 ++++---- 9 files changed, 18 insertions(+), 18 deletions(-) (limited to 'packages/utils.py') diff --git a/devel/management/commands/import_signatures.py b/devel/management/commands/import_signatures.py index ce1aba90..da1397ca 100644 --- a/devel/management/commands/import_signatures.py +++ b/devel/management/commands/import_signatures.py @@ -98,8 +98,8 @@ def import_signatures(keyring): # now prune the data down to what we actually want. # prune edges not in nodes, remove duplicates, and self-sigs - pruned_edges = set(edge for edge in edges - if edge.signer in nodes and edge.signer != edge.signee) + pruned_edges = {edge for edge in edges + if edge.signer in nodes and edge.signer != edge.signee} logger.info("creating or finding %d signatures", len(pruned_edges)) created_ct = updated_ct = 0 diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 3d4e6375..981c4dce 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -380,7 +380,7 @@ def db_update(archname, reponame, pkgs, force=False): dbdict = {dbpkg.pkgname: dbpkg for dbpkg in dbpkgs} dbset = set(dbdict.keys()) - syncset = set([pkg.name for pkg in pkgs]) + syncset = {pkg.name for pkg in pkgs} in_sync_not_db = syncset - dbset logger.info("%d packages in sync not db", len(in_sync_not_db)) diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py index 16b3869c..04f65764 100644 --- a/devel/management/commands/reporead_inotify.py +++ b/devel/management/commands/reporead_inotify.py @@ -77,7 +77,7 @@ def setup_notifier(self): for repo in repos) # take a python format string and generate all unique combinations # of directories from it; using set() ensures we filter it down - paths = set(self.path_template % values for values in combos) + paths = {self.path_template % values for values in combos} total_paths += len(paths) all_paths |= paths arch_path_map[arch] = paths diff --git a/devel/views.py b/devel/views.py index 083665d9..7d5947d1 100644 --- a/devel/views.py +++ b/devel/views.py @@ -277,8 +277,8 @@ def report(request, report_name, username=None): else: raise Http404 - arches = set(pkg.arch for pkg in packages) - repos = set(pkg.repo for pkg in packages) + arches = {pkg.arch for pkg in packages} + repos = {pkg.repo for pkg in packages} context = { 'all_maintainers': maints, 'title': title, diff --git a/main/models.py b/main/models.py index 5700cdf1..cc81637c 100644 --- a/main/models.py +++ b/main/models.py @@ -197,7 +197,7 @@ def get_requiredby(self): """ from packages.models import Depend 
provides = self.provides.all() - provide_names = set(provide.name for provide in provides) + provide_names = {provide.name for provide in provides} provide_names.add(self.pkgname) requiredby = Depend.objects.select_related('pkg', 'pkg__arch', 'pkg__repo').filter( diff --git a/packages/models.py b/packages/models.py index 0d0fbdf2..ede8c275 100644 --- a/packages/models.py +++ b/packages/models.py @@ -33,7 +33,7 @@ def get_associated_packages(self): def repositories(self): packages = self.get_associated_packages() - return sorted(set([p.repo for p in packages])) + return sorted({p.repo for p in packages}) def __unicode__(self): return u'%s: %s (%s)' % ( diff --git a/packages/utils.py b/packages/utils.py index 199e141d..5adc8637 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -79,8 +79,8 @@ def get_split_packages_info(): split_pkgs = Package.objects.exclude(pkgname=F('pkgbase')).exclude( pkgbase__in=pkgnames).values('pkgbase', 'repo', 'arch').annotate( last_update=Max('last_update')) - all_arches = Arch.objects.in_bulk(set(s['arch'] for s in split_pkgs)) - all_repos = Repo.objects.in_bulk(set(s['repo'] for s in split_pkgs)) + all_arches = Arch.objects.in_bulk({s['arch'] for s in split_pkgs}) + all_repos = Repo.objects.in_bulk({s['repo'] for s in split_pkgs}) for split in split_pkgs: split['arch'] = all_arches[split['arch']] split['repo'] = all_repos[split['repo']] @@ -143,7 +143,7 @@ def get_differences_info(arch_a, arch_b): cursor.execute(sql, [arch_a.id, arch_b.id]) results = cursor.fetchall() # column A will always have a value, column B might be NULL - to_fetch = set(row[0] for row in results) + to_fetch = {row[0] for row in results} # fetch all of the necessary packages pkgs = Package.objects.normal().in_bulk(to_fetch) # now build a list of tuples containing differences @@ -249,13 +249,13 @@ def attach_maintainers(packages): the maintainers and attach them to the packages to prevent N+1 query cascading.''' packages = list(packages) - pkgbases = set(p.pkgbase for p in packages) + pkgbases = {p.pkgbase for p in packages} rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, pkgbase__in=pkgbases).values_list( 'pkgbase', 'user_id').order_by().distinct() # get all the user objects we will need - user_ids = set(rel[1] for rel in rels) + user_ids = {rel[1] for rel in rels} users = User.objects.in_bulk(user_ids) # now build a pkgbase -> [maintainers...] 
map diff --git a/packages/views/signoff.py b/packages/views/signoff.py index 824a9922..340b2311 100644 --- a/packages/views/signoff.py +++ b/packages/views/signoff.py @@ -25,7 +25,7 @@ def signoffs(request): context = { 'signoff_groups': signoff_groups, 'arches': Arch.objects.all(), - 'repo_names': sorted(set(g.target_repo for g in signoff_groups)), + 'repo_names': sorted({g.target_repo for g in signoff_groups}), } return render(request, 'packages/signoffs.html', context) diff --git a/todolists/views.py b/todolists/views.py index b8d1dae1..9984ef9a 100644 --- a/todolists/views.py +++ b/todolists/views.py @@ -53,8 +53,8 @@ def view(request, list_id): # we don't hold onto the result, but the objects are the same here, # so accessing maintainers in the template is now cheap attach_maintainers(tp.pkg for tp in todolist.packages) - arches = set(tp.pkg.arch for tp in todolist.packages) - repos = set(tp.pkg.repo for tp in todolist.packages) + arches = {tp.pkg.arch for tp in todolist.packages} + repos = {tp.pkg.repo for tp in todolist.packages} return render(request, 'todolists/view.html', { 'list': todolist, 'svn_roots': svn_roots, @@ -67,8 +67,8 @@ def list_pkgbases(request, list_id, svn_root): '''Used to make bulk moves of packages a lot easier.''' todolist = get_object_or_404(Todolist, id=list_id) repos = get_list_or_404(Repo, svn_root=svn_root) - pkgbases = set(tp.pkg.pkgbase for tp in todolist.packages - if tp.pkg.repo in repos) + pkgbases = {tp.pkg.pkgbase for tp in todolist.packages + if tp.pkg.repo in repos} return HttpResponse('\n'.join(sorted(pkgbases)), mimetype='text/plain') -- cgit v1.2.3-54-g00ecf From b801818eeed1068595cea863e9ae427f3931f925 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 27 Dec 2012 23:25:51 -0600 Subject: Make attach_maintainers null-safe Signed-off-by: Dan McGee --- packages/utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index 5adc8637..5f0c111e 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -249,7 +249,7 @@ def attach_maintainers(packages): the maintainers and attach them to the packages to prevent N+1 query cascading.''' packages = list(packages) - pkgbases = {p.pkgbase for p in packages} + pkgbases = {p.pkgbase for p in packages if p is not None} rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, pkgbase__in=pkgbases).values_list( 'pkgbase', 'user_id').order_by().distinct() @@ -266,6 +266,8 @@ def attach_maintainers(packages): annotated = [] # and finally, attach the maintainer lists on the original packages for package in packages: + if package is None: + continue package.maintainers = maintainers[package.pkgbase] annotated.append(package) -- cgit v1.2.3-54-g00ecf From 375684ed91dd5499e7a4ea7787e45803e8467e16 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 18 Jan 2013 20:52:20 -0600 Subject: Use a set instead of list when gathering package differences If we implement the __eq__ and __hash__ methods, we can use a set to gather package difference objects and make deduplication of objects a lot more efficient. 
Signed-off-by: Dan McGee --- packages/utils.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index 5f0c111e..a72404f4 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -1,6 +1,6 @@ from collections import defaultdict from itertools import chain -from operator import itemgetter +from operator import attrgetter, itemgetter import re from django.core.serializers.json import DjangoJSONEncoder @@ -108,10 +108,15 @@ def classes(self): css_classes.append(self.pkg_b.arch.name) return ' '.join(css_classes) - def __cmp__(self, other): - if isinstance(other, Difference): - return cmp(self.__dict__, other.__dict__) - return False + def __key(self): + return (self.pkgname, hash(self.repo), + hash(self.pkg_a), hash(self.pkg_b)) + + def __eq__(self, other): + return self.__key() == other.__key() + + def __hash__(self): + return hash(self.__key()) @cache_function(127) @@ -146,8 +151,8 @@ def get_differences_info(arch_a, arch_b): to_fetch = {row[0] for row in results} # fetch all of the necessary packages pkgs = Package.objects.normal().in_bulk(to_fetch) - # now build a list of tuples containing differences - differences = [] + # now build a set containing differences + differences = set() for row in results: pkg_a = pkgs.get(row[0]) pkg_b = pkgs.get(row[1]) @@ -160,11 +165,11 @@ def get_differences_info(arch_a, arch_b): name = pkg_a.pkgname if pkg_a else pkg_b.pkgname repo = pkg_a.repo if pkg_a else pkg_b.repo item = Difference(name, repo, pkg_b, pkg_a) - if item not in differences: - differences.append(item) + differences.add(item) # now sort our list by repository, package name - differences.sort(key=lambda a: (a.repo.name, a.pkgname)) + key_func = attrgetter('repo.name', 'pkgname') + differences = sorted(differences, key=key_func) return differences -- cgit v1.2.3-54-g00ecf From 2c958511c41f53fb7de49ed4662eec966e0b76a5 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 22 Jan 2013 16:48:49 -0600 Subject: Use a subquery rather than two queries in attach_maintainers Now that we are using a database that doesn't stink, it makes more sense to do all of the stuff we need to do down at the database level. This helps a lot when 500+ packages are in play at a given time, such as some of our larger rebuild todo lists. 
Signed-off-by: Dan McGee --- packages/utils.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index a72404f4..49aeb8ce 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -6,6 +6,7 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db import connection from django.db.models import Count, Max, F +from django.db.models.query import QuerySet from django.contrib.auth.models import User from main.models import Package, PackageFile, Arch, Repo @@ -253,8 +254,11 @@ def attach_maintainers(packages): '''Given a queryset or something resembling it of package objects, find all the maintainers and attach them to the packages to prevent N+1 query cascading.''' - packages = list(packages) - pkgbases = {p.pkgbase for p in packages if p is not None} + if isinstance(packages, QuerySet): + pkgbases = packages.values('pkgbase') + else: + packages = list(packages) + pkgbases = {p.pkgbase for p in packages if p is not None} rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, pkgbase__in=pkgbases).values_list( 'pkgbase', 'user_id').order_by().distinct() -- cgit v1.2.3-54-g00ecf From 5bc85244281efc916132c86046018d0ebe70b5e9 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 10 Feb 2013 12:45:24 -0600 Subject: Fix split packages sitemap We had a ton of duplicate entries included due to the query implicitly including a 'GROUP BY' clause on the default sorting by pkgname. Fix it and cut the sitemap down to the correct size without duplicate entries. Signed-off-by: Dan McGee --- packages/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index 49aeb8ce..ef6311eb 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -79,7 +79,7 @@ def get_split_packages_info(): pkgnames = Package.objects.values('pkgname') split_pkgs = Package.objects.exclude(pkgname=F('pkgbase')).exclude( pkgbase__in=pkgnames).values('pkgbase', 'repo', 'arch').annotate( - last_update=Max('last_update')) + last_update=Max('last_update')).order_by().distinct() all_arches = Arch.objects.in_bulk({s['arch'] for s in split_pkgs}) all_repos = Repo.objects.in_bulk({s['repo'] for s in split_pkgs}) for split in split_pkgs: -- cgit v1.2.3-54-g00ecf From 1f2a6384f332e75e9befc13b5a4b7b2906db6c50 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Mon, 11 Mar 2013 21:25:27 -0500 Subject: Remove function caching in packages/utils We don't see these called enough to make caching the data worth it. 
Signed-off-by: Dan McGee --- packages/utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) (limited to 'packages/utils.py') diff --git a/packages/utils.py b/packages/utils.py index ef6311eb..a4217fbd 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -10,7 +10,7 @@ from django.contrib.auth.models import User from main.models import Package, PackageFile, Arch, Repo -from main.utils import (cache_function, database_vendor, +from main.utils import (database_vendor, groupby_preserve_order, PackageStandin) from .models import (PackageGroup, PackageRelation, License, Depend, Conflict, Provision, Replacement, @@ -33,7 +33,6 @@ def parse_version(version): return ver, rel, epoch -@cache_function(127) def get_group_info(include_arches=None): raw_groups = PackageGroup.objects.values_list( 'name', 'pkg__arch__name').order_by('name').annotate( @@ -120,7 +119,6 @@ def __hash__(self): return hash(self.__key()) -@cache_function(127) def get_differences_info(arch_a, arch_b): # This is a monster. Join packages against itself, looking for packages in # our non-'any' architectures only, and not having a corresponding package -- cgit v1.2.3-54-g00ecf From b7b24740640e24883cd17fd683e1d465fbb343f8 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 16 Apr 2013 22:12:01 -0500 Subject: Various minor code cleanups and fixes Most of these were suggested by PyCharm, and include everything from little syntax issues and other bad smells to dead or bad code. Signed-off-by: Dan McGee --- devel/management/commands/pgp_import.py | 1 + devel/models.py | 1 - devel/utils.py | 2 +- devel/views.py | 2 +- main/log.py | 1 - main/migrations/0029_fill_in_repo_data.py | 1 - main/models.py | 12 +++++------- main/utils.py | 1 - mirrors/management/commands/mirrorcheck.py | 12 +++--------- mirrors/models.py | 2 +- mirrors/utils.py | 6 +++--- packages/migrations/0002_populate_package_relation.py | 2 -- packages/templatetags/package_extras.py | 4 ++-- packages/utils.py | 2 +- packages/views/display.py | 2 -- packages/views/flag.py | 3 +-- public/views.py | 1 - releng/management/commands/syncisos.py | 2 +- releng/models.py | 2 +- releng/views.py | 2 +- retro/templates/retro/index-20030330.html | 1 - sitestatic/archweb.js | 1 - todolists/utils.py | 1 - todolists/views.py | 1 - visualize/static/visualize.js | 2 +- 25 files changed, 23 insertions(+), 44 deletions(-) (limited to 'packages/utils.py') diff --git a/devel/management/commands/pgp_import.py b/devel/management/commands/pgp_import.py index 10e6cfcb..b1f29d77 100644 --- a/devel/management/commands/pgp_import.py +++ b/devel/management/commands/pgp_import.py @@ -95,6 +95,7 @@ def parse_keydata(data): # parse all of the output from our successful GPG command logger.info("parsing command output") + node = None for line in data.split('\n'): parts = line.split(':') if parts[0] == 'pub': diff --git a/devel/models.py b/devel/models.py index 67de40a6..4354e0f2 100644 --- a/devel/models.py +++ b/devel/models.py @@ -4,7 +4,6 @@ from django.db import models from django.db.models.signals import pre_save from django.contrib.auth.models import User -from django.utils.timezone import now from django_countries import CountryField from .fields import PGPKeyField diff --git a/devel/utils.py b/devel/utils.py index e8e3a6c4..340841f5 100644 --- a/devel/utils.py +++ b/devel/utils.py @@ -131,7 +131,7 @@ def find(self, userstring): self.username_email, self.user_name) for matcher in find_methods: user = matcher(name, email) - if user != None: + if user is not None: break 
self.cache[userstring] = user diff --git a/devel/views.py b/devel/views.py index 61c1e568..4258ea7f 100644 --- a/devel/views.py +++ b/devel/views.py @@ -34,7 +34,7 @@ @login_required def index(request): '''the developer dashboard''' - if(request.user.is_authenticated()): + if request.user.is_authenticated(): inner_q = PackageRelation.objects.filter(user=request.user) else: inner_q = PackageRelation.objects.none() diff --git a/main/log.py b/main/log.py index 63634874..5c745cc8 100644 --- a/main/log.py +++ b/main/log.py @@ -46,7 +46,6 @@ def filter(self, record): trace = '\n'.join(traceback.format_exception(*record.exc_info)) key = md5(trace).hexdigest() - duplicate = False cache = self.cache_module.cache # Test if the cache works diff --git a/main/migrations/0029_fill_in_repo_data.py b/main/migrations/0029_fill_in_repo_data.py index 0887b28c..7da6b1c4 100644 --- a/main/migrations/0029_fill_in_repo_data.py +++ b/main/migrations/0029_fill_in_repo_data.py @@ -7,7 +7,6 @@ class Migration(DataMigration): def forwards(self, orm): - "Write your forwards methods here." orm.Repo.objects.filter(name__istartswith='community').update(bugs_project=5, svn_root='community') orm.Repo.objects.filter(name__iexact='multilib').update(bugs_project=5, svn_root='community') diff --git a/main/models.py b/main/models.py index 89215f05..24aeed89 100644 --- a/main/models.py +++ b/main/models.py @@ -7,7 +7,6 @@ from django.db.models import Q from django.contrib.auth.models import User from django.contrib.sites.models import Site -from django.utils.timezone import now from .fields import PositiveBigIntegerField from .utils import set_created_field @@ -140,7 +139,7 @@ def get_full_url(self, proto='https'): @property def signature(self): try: - data = b64decode(self.pgp_signature) + data = b64decode(self.pgp_signature.encode('utf-8')) except TypeError: return None if not data: @@ -274,7 +273,6 @@ def get_depends(self): Packages will match the testing status of this package if possible. """ deps = [] - arches = None # TODO: we can use list comprehension and an 'in' query to make this # more effective for dep in self.depends.all(): @@ -400,13 +398,13 @@ def elsewhere(self): '''attempt to locate this package anywhere else, regardless of architecture or repository. 
Excludes this package from the list.''' names = [self.pkgname] - if self.pkgname.startswith('lib32-'): + if self.pkgname.startswith(u'lib32-'): names.append(self.pkgname[6:]) - elif self.pkgname.endswith('-multilib'): + elif self.pkgname.endswith(u'-multilib'): names.append(self.pkgname[:-9]) else: - names.append('lib32-' + self.pkgname) - names.append(self.pkgname + '-multilib') + names.append(u'lib32-' + self.pkgname) + names.append(self.pkgname + u'-multilib') return Package.objects.normal().filter( pkgname__in=names).exclude(id=self.id).order_by( 'arch__name', 'repo__name') diff --git a/main/utils.py b/main/utils.py index 8394e5cd..9ee8db58 100644 --- a/main/utils.py +++ b/main/utils.py @@ -3,7 +3,6 @@ except ImportError: import pickle -from datetime import datetime import hashlib from django.core.cache import cache diff --git a/mirrors/management/commands/mirrorcheck.py b/mirrors/management/commands/mirrorcheck.py index d6de8f22..e7dd7b49 100644 --- a/mirrors/management/commands/mirrorcheck.py +++ b/mirrors/management/commands/mirrorcheck.py @@ -106,19 +106,13 @@ def parse_lastsync(log, data): def check_mirror_url(mirror_url, location, timeout): - if location: - if location.family == socket.AF_INET6: - ipopt = '--ipv6' - elif location.family == socket.AF_INET: - ipopt = '--ipv4' - url = mirror_url.url + 'lastsync' logger.info("checking URL %s", url) log = MirrorLog(url=mirror_url, check_time=now(), location=location) headers = {'User-Agent': 'archweb/1.0'} req = urllib2.Request(url, None, headers) + start = time.time() try: - start = time.time() result = urllib2.urlopen(req, timeout=timeout) data = result.read() result.close() @@ -147,12 +141,12 @@ def check_mirror_url(mirror_url, location, timeout): elif isinstance(e.reason, socket.error): log.error = e.reason.args[1] logger.debug("failed: %s, %s", url, log.error) - except HTTPException as e: + except HTTPException: # e.g., BadStatusLine log.is_success = False log.error = "Exception in processing HTTP request." logger.debug("failed: %s, %s", url, log.error) - except socket.timeout as e: + except socket.timeout: log.is_success = False log.error = "Connection timed out." 
logger.debug("failed: %s, %s", url, log.error) diff --git a/mirrors/models.py b/mirrors/models.py index 791b0078..d8ac7952 100644 --- a/mirrors/models.py +++ b/mirrors/models.py @@ -92,7 +92,7 @@ def clean(self): families = self.address_families() self.has_ipv4 = socket.AF_INET in families self.has_ipv6 = socket.AF_INET6 in families - except socket.error as e: + except socket.error: # We don't fail in this case; we'll just set both to False self.has_ipv4 = False self.has_ipv6 = False diff --git a/mirrors/utils.py b/mirrors/utils.py index 5a8bbf5d..531cf005 100644 --- a/mirrors/utils.py +++ b/mirrors/utils.py @@ -1,13 +1,13 @@ from datetime import timedelta from django.db import connection -from django.db.models import Avg, Count, Max, Min, StdDev +from django.db.models import Count, Max, Min from django.utils.dateparse import parse_datetime from django.utils.timezone import now from django_countries.fields import Country from main.utils import cache_function, database_vendor -from .models import MirrorLog, MirrorProtocol, MirrorUrl +from .models import MirrorLog, MirrorUrl DEFAULT_CUTOFF = timedelta(hours=24) @@ -165,7 +165,7 @@ def get_mirror_errors(cutoff=DEFAULT_CUTOFF, mirror_id=None): ).order_by('-last_occurred', '-error_count') if mirror_id: - urls = urls.filter(mirror_id=mirror_id) + errors = errors.filter(url__mirror_id=mirror_id) errors = list(errors) for err in errors: diff --git a/packages/migrations/0002_populate_package_relation.py b/packages/migrations/0002_populate_package_relation.py index 738e068f..b0d32c7a 100644 --- a/packages/migrations/0002_populate_package_relation.py +++ b/packages/migrations/0002_populate_package_relation.py @@ -11,7 +11,6 @@ class Migration(DataMigration): ) def forwards(self, orm): - "Write your forwards methods here." # search by pkgbase first and insert those records qs = orm['main.Package'].objects.exclude(maintainer=None).exclude( pkgbase=None).distinct().values('pkgbase', 'maintainer_id') @@ -29,7 +28,6 @@ def forwards(self, orm): defaults={'user_id': row['maintainer_id']}) def backwards(self, orm): - "Write your backwards methods here." 
if not db.dry_run: orm.PackageRelation.objects.all().delete() pass diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index f14fab1e..ef0e1aea 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -53,10 +53,10 @@ def do_buildsortqs(parser, token): tagname, sortfield = token.split_contents() except ValueError: raise template.TemplateSyntaxError( - "%r tag requires a single argument" % tagname) + "%r tag requires a single argument" % token) if not (sortfield[0] == sortfield[-1] and sortfield[0] in ('"', "'")): raise template.TemplateSyntaxError( - "%r tag's argument should be in quotes" % tagname) + "%r tag's argument should be in quotes" % token) return BuildQueryStringNode(sortfield[1:-1]) diff --git a/packages/utils.py b/packages/utils.py index a4217fbd..4f3b8665 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -391,7 +391,7 @@ def signoffs_id_query(model, repos): """ cursor = connection.cursor() # query pre-process- fill in table name and placeholders for IN - repo_sql = ','.join(['%s' for r in repos]) + repo_sql = ','.join(['%s' for _ in repos]) sql = sql % (model._meta.db_table, repo_sql, repo_sql) repo_ids = [r.pk for r in repos] # repo_ids are needed twice, so double the array diff --git a/packages/views/display.py b/packages/views/display.py index 87424483..021c7ed8 100644 --- a/packages/views/display.py +++ b/packages/views/display.py @@ -228,8 +228,6 @@ def download(request, name, repo, arch): if pkg.arch.agnostic: # grab the first non-any arch to fake the download path arch = Arch.objects.exclude(agnostic=True)[0].name - values = { - } url = '{host}{repo}/os/{arch}/{filename}'.format(host=url.url, repo=pkg.repo.name.lower(), arch=arch, filename=pkg.filename) return redirect(url) diff --git a/packages/views/flag.py b/packages/views/flag.py index 5c76e1d5..39cdcef8 100644 --- a/packages/views/flag.py +++ b/packages/views/flag.py @@ -110,7 +110,7 @@ def perform_updates(): subject = '%s package [%s] marked out-of-date' % \ (pkg.repo.name, pkg.pkgname) for maint in maints: - if maint.userprofile.notify == True: + if maint.userprofile.notify is True: toemail.append(maint.email) if toemail: @@ -133,7 +133,6 @@ def perform_updates(): return redirect('package-flag-confirmed', name=name, repo=repo, arch=arch) else: - initial = {} form = FlagForm(authenticated=authenticated) context = { diff --git a/public/views.py b/public/views.py index 22cb8759..39273396 100644 --- a/public/views.py +++ b/public/views.py @@ -125,7 +125,6 @@ def keys(request): master_keys = MasterKey.objects.select_related('owner', 'revoker', 'owner__userprofile', 'revoker__userprofile').filter( revoked__isnull=True) - master_key_ids = frozenset(key.pgp_key[-16:] for key in master_keys) sig_counts = PGPSignature.objects.filter(not_expired, valid=True, signee__in=user_key_ids).order_by().values_list('signer').annotate( diff --git a/releng/management/commands/syncisos.py b/releng/management/commands/syncisos.py index c9f61964..f182cc33 100644 --- a/releng/management/commands/syncisos.py +++ b/releng/management/commands/syncisos.py @@ -20,7 +20,7 @@ def handle_starttag(self, tag, attrs): if tag == 'a': for name, value in attrs: if name == "href": - if value != '../' and self.url_re.search(value) != None: + if value != '../' and self.url_re.search(value) is not None: self.hyperlinks.append(value[:-1]) def parse(self, url): diff --git a/releng/models.py b/releng/models.py index b95f7d52..5ee2f325 100644 --- a/releng/models.py 
+++ b/releng/models.py @@ -160,7 +160,7 @@ def info_html(self): def torrent(self): try: - data = b64decode(self.torrent_data) + data = b64decode(self.torrent_data.encode('utf-8')) except TypeError: return None if not data: diff --git a/releng/views.py b/releng/views.py index ad4b07d1..b1c76a4a 100644 --- a/releng/views.py +++ b/releng/views.py @@ -231,7 +231,7 @@ def release_torrent(request, version): release = get_object_or_404(Release, version=version) if not release.torrent_data: raise Http404 - data = b64decode(release.torrent_data) + data = b64decode(release.torrent_data.encode('utf-8')) response = HttpResponse(data, content_type='application/x-bittorrent') # TODO: this is duplicated from Release.iso_url() filename = 'archlinux-%s-dual.iso.torrent' % release.version diff --git a/retro/templates/retro/index-20030330.html b/retro/templates/retro/index-20030330.html index 449731af..51cc8ba3 100644 --- a/retro/templates/retro/index-20030330.html +++ b/retro/templates/retro/index-20030330.html @@ -232,7 +232,6 @@
    [ Older News ]

    -


    diff --git a/sitestatic/archweb.js b/sitestatic/archweb.js index dda22d9e..aa225f5f 100644 --- a/sitestatic/archweb.js +++ b/sitestatic/archweb.js @@ -146,7 +146,6 @@ if (typeof $ !== 'undefined' && typeof $.tablesorter !== 'undefined') { (function($) { $.fn.enableCheckboxRangeSelection = function() { var lastCheckbox = null, - lastElement = null, spec = this; spec.unbind("click.checkboxrange"); diff --git a/todolists/utils.py b/todolists/utils.py index 51a75a3c..7b98c887 100644 --- a/todolists/utils.py +++ b/todolists/utils.py @@ -1,5 +1,4 @@ from django.db import connections, router -from django.db.models import Count from .models import Todolist, TodolistPackage from packages.models import Package diff --git a/todolists/views.py b/todolists/views.py index 7636d38e..d5b39934 100644 --- a/todolists/views.py +++ b/todolists/views.py @@ -9,7 +9,6 @@ from django.views.decorators.cache import never_cache from django.views.generic import DeleteView from django.template import Context, loader -from django.template.defaultfilters import slugify from django.utils.timezone import now from main.models import Package, Repo diff --git a/visualize/static/visualize.js b/visualize/static/visualize.js index 7e240d44..5004fe6c 100644 --- a/visualize/static/visualize.js +++ b/visualize/static/visualize.js @@ -55,7 +55,7 @@ function packages_treemap(chart_id, orderings, default_order) { var nodes = d3_div.data([json]).selectAll("div") .data(treemap.nodes, key_func); /* start out new nodes in the center of the picture area */ - var w_center = jq_div.width() / 2; + var w_center = jq_div.width() / 2, h_center = jq_div.height() / 2; nodes.enter().append("div") .attr("class", "treemap-cell") -- cgit v1.2.3-54-g00ecf
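
As a quick illustration of the parse_version() helper extracted from reporead into packages/utils.py above, here is a minimal usage sketch; the version strings are invented examples rather than data from these patches, and assume the function exactly as defined in the diff:

    >>> from packages.utils import parse_version
    >>> parse_version('1:1.2.3-4')   # epoch:pkgver-pkgrel
    ('1.2.3', '4', 1)
    >>> parse_version('2.0-1')       # no epoch present, defaults to 0
    ('2.0', '1', 0)
    >>> parse_version('2.0')         # no pkgrel, so the regex does not match
    (None, None, 0)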