author     Nicolás Reynolds <fauno@kiwwwi.com.ar>    2011-12-04 23:07:59 -0300
committer  Nicolás Reynolds <fauno@kiwwwi.com.ar>    2011-12-04 23:07:59 -0300
commit     988c2c6d400b3721464c2891891807d504b076b0 (patch)
tree       013dc3d676edad701d0d3bbd239bc2df0cdf76b4 /devel
parent     60a1fc6cc4cef0b9eed58ea4f0ca003b76ec382a (diff)
parent     183c4d9cefa95f46c3fa3a6936f837542426eac2 (diff)
Merge branch 'master' of ssh://gparabola/parabolaweb
Conflicts:
	local_settings.py.example
	media/archweb.css
	packages/templatetags/package_extras.py
	public/views.py
	templates/packages/details.html
	templates/packages/flag.html
	templates/packages/flag_confirmed.html
	templates/packages/flagged.html
	templates/packages/search.html
	templates/public/download.html
	templates/todolists/view.html
Diffstat (limited to 'devel')
-rw-r--r--  devel/admin.py                                 |  12
-rw-r--r--  devel/management/commands/generate_keyring.py  |  86
-rw-r--r--  devel/management/commands/rematch_packager.py  |   2
-rw-r--r--  devel/management/commands/reporead.py          | 286
-rwxr-xr-x  devel/management/commands/reporead_inotify.py  | 211
-rw-r--r--  devel/migrations/0001_initial.py               |  18
-rw-r--r--  devel/migrations/0002_auto__add_masterkey.py   |  76
-rw-r--r--  devel/migrations/__init__.py                   |   0
-rw-r--r--  devel/models.py                                |  20
-rw-r--r--  devel/utils.py                                 |  16
-rw-r--r--  devel/views.py                                 |  22
11 files changed, 621 insertions(+), 128 deletions(-)
diff --git a/devel/admin.py b/devel/admin.py
new file mode 100644
index 00000000..84082fb8
--- /dev/null
+++ b/devel/admin.py
@@ -0,0 +1,12 @@
+from django.contrib import admin
+
+from .models import MasterKey
+
+
+class MasterKeyAdmin(admin.ModelAdmin):
+ list_display = ('pgp_key', 'owner', 'created', 'revoker', 'revoked')
+ search_fields = ('pgp_key', 'owner', 'revoker')
+
+admin.site.register(MasterKey, MasterKeyAdmin)
+
+# vim: set ts=4 sw=4 et:
diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py
new file mode 100644
index 00000000..062c738b
--- /dev/null
+++ b/devel/management/commands/generate_keyring.py
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+"""
+generate_keyring command
+
+Assemble a GPG keyring with all known developer keys.
+
+Usage: ./manage.py generate_keyring <keyserver> <keyring_path>
+"""
+
+from django.core.management.base import BaseCommand, CommandError
+
+import logging
+import subprocess
+import sys
+
+from devel.models import MasterKey
+from main.models import UserProfile
+
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(asctime)s -> %(levelname)s: %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ stream=sys.stderr)
+logger = logging.getLogger()
+
+class Command(BaseCommand):
+ args = "<keyserver> <keyring_path> [ownertrust_path]"
+ help = "Assemble a GPG keyring with all known developer keys."
+
+ def handle(self, *args, **options):
+ v = int(options.get('verbosity', None))
+ if v == 0:
+ logger.level = logging.ERROR
+ elif v == 1:
+ logger.level = logging.INFO
+ elif v == 2:
+ logger.level = logging.DEBUG
+
+ if len(args) < 2:
+ raise CommandError("keyserver and keyring_path must be provided")
+
+ generate_keyring(args[0], args[1])
+
+ if len(args) > 2:
+ generate_ownertrust(args[2])
+
+
+def generate_keyring(keyserver, keyring):
+ logger.info("getting all known key IDs")
+
+ # Screw you Django, for not letting one natively do value != <empty string>
+ key_ids = UserProfile.objects.filter(user__is_active=True,
+ pgp_key__isnull=False).extra(where=["pgp_key != ''"]).values_list(
+ "pgp_key", flat=True)
+ logger.info("%d keys fetched from user profiles", len(key_ids))
+ master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True)
+ logger.info("%d keys fetched from master keys", len(master_key_ids))
+
+ gpg_cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
+ "--keyserver", keyserver, "--recv-keys"]
+ logger.info("running command: %r", gpg_cmd)
+ gpg_cmd.extend(key_ids)
+ gpg_cmd.extend(master_key_ids)
+ subprocess.check_call(gpg_cmd)
+ logger.info("keyring at %s successfully updated", keyring)
+
+
+TRUST_LEVELS = {
+ 'unknown': 0,
+ 'expired': 1,
+ 'undefined': 2,
+ 'never': 3,
+ 'marginal': 4,
+ 'fully': 5,
+ 'ultimate': 6,
+}
+
+
+def generate_ownertrust(trust_path):
+ master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True)
+ with open(trust_path, "w") as trustfile:
+ for key_id in master_key_ids:
+ trustfile.write("%s:%d:\n" % (key_id, TRUST_LEVELS['marginal']))
+ logger.info("trust file at %s created or overwritten", trust_path)
+
+# vim: set ts=4 sw=4 et:
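For reference, a minimal standalone sketch of the ownertrust format this command emits; the fingerprint and paths below are hypothetical, and `gpg --import-ownertrust` is the stock way to load the resulting file:

    import subprocess

    TRUST_LEVELS = {'marginal': 4}
    # hypothetical 40-hex-digit master key fingerprint
    master_keys = ['AAAABBBBCCCCDDDDEEEEFFFF0000111122223333']

    with open('/tmp/ownertrust.txt', 'w') as trustfile:
        for key_id in master_keys:
            # gpg ownertrust lines take the form '<fingerprint>:<level>:'
            trustfile.write('%s:%d:\n' % (key_id, TRUST_LEVELS['marginal']))

    # assumes gpg is installed; this updates the local trust database
    subprocess.check_call(['gpg', '--import-ownertrust', '/tmp/ownertrust.txt'])
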
diff --git a/devel/management/commands/rematch_packager.py b/devel/management/commands/rematch_packager.py
index ba6e6a54..461d83ab 100644
--- a/devel/management/commands/rematch_packager.py
+++ b/devel/management/commands/rematch_packager.py
@@ -24,7 +24,7 @@ logging.basicConfig(
logger = logging.getLogger()
class Command(NoArgsCommand):
- help = "Runs a check on all active mirror URLs to determine if they are reachable via IPv4 and/or v6."
+ help = "Match all packages with a packager_str but NULL packager_id to a packager if we can find one."
def handle_noargs(self, **options):
v = int(options.get('verbosity', None))
diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py
index cf597577..c444538b 100644
--- a/devel/management/commands/reporead.py
+++ b/devel/management/commands/reporead.py
@@ -13,10 +13,6 @@ Example:
./manage.py reporead i686 /tmp/core.db.tar.gz
"""
-from django.core.management.base import BaseCommand, CommandError
-from django.contrib.auth.models import User
-from django.db import transaction
-
from collections import defaultdict
import io
import os
@@ -27,6 +23,11 @@ import logging
from datetime import datetime
from optparse import make_option
+from django.core.management.base import BaseCommand, CommandError
+from django.contrib.auth.models import User
+from django.db import connections, router, transaction
+from django.db.utils import IntegrityError
+
from devel.utils import UserFinder
from main.models import Arch, Package, PackageDepend, PackageFile, Repo
from packages.models import Conflict, Provision, Replacement
@@ -36,6 +37,8 @@ logging.basicConfig(
format='%(asctime)s -> %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
stream=sys.stderr)
+TRACE = 5
+logging.addLevelName(TRACE, 'TRACE')
logger = logging.getLogger()
class Command(BaseCommand):
@@ -51,8 +54,6 @@ class Command(BaseCommand):
def handle(self, arch=None, filename=None, **options):
if not arch:
raise CommandError('Architecture is required.')
- if not validate_arch(arch):
- raise CommandError('Specified architecture %s is not currently known.' % arch)
if not filename:
raise CommandError('Package database file is required.')
filename = os.path.normpath(filename)
@@ -72,8 +73,8 @@ class Command(BaseCommand):
class Pkg(object):
"""An interim 'container' object for holding Arch package data."""
- bare = ( 'name', 'base', 'arch', 'desc', 'filename',
- 'md5sum', 'sha256sum', 'pgpsig', 'url', 'packager' )
+ bare = ( 'name', 'base', 'arch', 'filename',
+ 'md5sum', 'sha256sum', 'url', 'packager' )
number = ( 'csize', 'isize' )
collections = ( 'depends', 'optdepends', 'conflicts',
'provides', 'replaces', 'groups', 'license', 'files' )
@@ -85,6 +86,7 @@ class Pkg(object):
self.ver = None
self.rel = None
self.epoch = 0
+ self.pgpsig = None
for k in self.bare + self.number:
setattr(self, k, None)
for k in self.collections:
@@ -99,6 +101,9 @@ class Pkg(object):
setattr(self, k, v[0][:254])
elif k in self.number:
setattr(self, k, long(v[0]))
+ elif k in ('desc', 'pgpsig'):
+ # do NOT prune these values at all
+ setattr(self, k, v[0])
elif k == 'version':
match = self.version_re.match(v[0])
self.ver = match.group(3)
@@ -185,8 +190,6 @@ def create_multivalued(dbpkg, repopkg, db_attr, repo_attr):
finder = UserFinder()
def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
- db_score = 1
-
if repopkg.base:
dbpkg.pkgbase = repopkg.base
else:
@@ -210,7 +213,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
dbpkg.last_update = timestamp
dbpkg.save()
- db_score += populate_files(dbpkg, repopkg, force=force)
+ populate_files(dbpkg, repopkg, force=force)
dbpkg.packagedepend_set.all().delete()
for y in repopkg.depends:
@@ -231,28 +234,23 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
create_multivalued(dbpkg, repopkg, 'groups', 'groups')
create_multivalued(dbpkg, repopkg, 'licenses', 'license')
- related_score = (len(repopkg.depends) + len(repopkg.optdepends)
- + len(repopkg.conflicts) + len(repopkg.provides)
- + len(repopkg.replaces) + len(repopkg.groups)
- + len(repopkg.license))
- if related_score:
- db_score += (related_score / 20) + 1
- return db_score
+pkg_same_version = lambda pkg, dbpkg: pkg.ver == dbpkg.pkgver \
+ and pkg.rel == dbpkg.pkgrel and pkg.epoch == dbpkg.epoch
def populate_files(dbpkg, repopkg, force=False):
if not force:
- if dbpkg.pkgver != repopkg.ver or dbpkg.pkgrel != repopkg.rel \
- or dbpkg.epoch != repopkg.epoch:
+ if not pkg_same_version(repopkg, dbpkg):
logger.info("DB version (%s) didn't match repo version "
"(%s) for package %s, skipping file list addition",
dbpkg.full_version, repopkg.full_version, dbpkg.pkgname)
- return 0
+ return
if not dbpkg.files_last_update or not dbpkg.last_update:
pass
elif dbpkg.files_last_update > dbpkg.last_update:
- return 0
+ return
+
# only delete files if we are reading a DB that contains them
if repopkg.has_files:
dbpkg.packagefile_set.all().delete()
@@ -271,125 +269,156 @@ def populate_files(dbpkg, repopkg, force=False):
pkgfile.save(force_insert=True)
dbpkg.files_last_update = datetime.utcnow()
dbpkg.save()
- return (len(repopkg.files) / 50) + 1
- return 0
-class Batcher(object):
- def __init__(self, threshold, start=0):
- self.threshold = threshold
- self.meter = start
+def select_pkg_for_update(dbpkg):
+ database = router.db_for_write(Package, instance=dbpkg)
+ connection = connections[database]
+ if 'sqlite' in connection.settings_dict['ENGINE'].lower():
+ return dbpkg
+ new_pkg = Package.objects.raw(
+ 'SELECT * FROM packages WHERE id = %s FOR UPDATE',
+ [dbpkg.id])
+ return list(new_pkg)[0]
- def batch_commit(self, score):
- """
- Track updates to the database and perform a commit if the batch
- becomes sufficiently large. "Large" is defined by waiting for the
- sum of scores to exceed the arbitrary threshold value; once it is
- hit a commit is issued.
- """
- self.meter += score
- if self.meter > self.threshold:
- logger.debug("Committing transaction, batch threshold hit")
- transaction.commit()
- self.meter = 0
+def update_common(archname, reponame, pkgs, sanity_check=True):
+ with transaction.commit_manually():
+ repository = Repo.objects.get(name__iexact=reponame)
+ architecture = Arch.objects.get(name__iexact=archname)
+ # no-arg order_by() removes even the default ordering; we don't need it
+ dbpkgs = Package.objects.filter(
+ arch=architecture, repo=repository).order_by()
-@transaction.commit_on_success
-def db_update(archname, reponame, pkgs, options):
- """
- Parses a list and updates the Arch dev database accordingly.
+ logger.info("%d packages in current web DB", len(dbpkgs))
+ logger.info("%d packages in new updating DB", len(pkgs))
- Arguments:
- pkgs -- A list of Pkg objects.
+ # Try to catch those random package deletions that make Eric so unhappy.
+ if len(dbpkgs):
+ dbpercent = 100.0 * len(pkgs) / len(dbpkgs)
+ else:
+ dbpercent = 0.0
+ logger.info("DB package ratio: %.1f%%", dbpercent)
+
+ # Fewer than 20 packages makes the percentage check unreliable, but it also
+ # means we expect the repo to fluctuate a lot.
+ msg = "Package database has %.1f%% the number of packages in the " \
+ "web database" % dbpercent
+ if not sanity_check:
+ pass
+ elif repository.testing or repository.staging:
+ pass
+ elif len(dbpkgs) == 0 and len(pkgs) == 0:
+ pass
+ elif len(dbpkgs) > 20 and dbpercent < 50.0:
+ logger.error(msg)
+ raise Exception(msg)
+ elif dbpercent < 75.0:
+ logger.warning(msg)
+
+ # If isolation level is repeatable-read, we need to ensure each package
+ # update starts a new transaction and re-queries the database as necessary
+ # to guard against simultaneous updates
+ transaction.commit()
+ return dbpkgs
+
+def db_update(archname, reponame, pkgs, force=False):
"""
- logger.info('Updating Arch: %s', archname)
- force = options.get('force', False)
- filesonly = options.get('filesonly', False)
+ Parses a list of packages and updates the packages database accordingly.
+ """
+ logger.info('Updating %s (%s)', reponame, archname)
+ dbpkgs = update_common(archname, reponame, pkgs, True)
repository = Repo.objects.get(name__iexact=reponame)
architecture = Arch.objects.get(name__iexact=archname)
- # no-arg order_by() removes even the default ordering; we don't need it
- dbpkgs = Package.objects.filter(
- arch=architecture, repo=repository).order_by()
+
# This makes our inner loop where we find packages by name *way* more
# efficient by not having to go to the database for each package to
# SELECT them by name.
- dbdict = dict([(pkg.pkgname, pkg) for pkg in dbpkgs])
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
- logger.debug("Creating sets")
dbset = set(dbdict.keys())
syncset = set([pkg.name for pkg in pkgs])
- logger.info("%d packages in current web DB", len(dbset))
- logger.info("%d packages in new updating db", len(syncset))
+
in_sync_not_db = syncset - dbset
logger.info("%d packages in sync not db", len(in_sync_not_db))
-
- # Try to catch those random package deletions that make Eric so unhappy.
- if len(dbset):
- dbpercent = 100.0 * len(syncset) / len(dbset)
- else:
- dbpercent = 0.0
- logger.info("DB package ratio: %.1f%%", dbpercent)
-
- # Fewer than 20 packages makes the percentage check unreliable, but it also
- # means we expect the repo to fluctuate a lot.
- msg = "Package database has %.1f%% the number of packages in the " \
- "web database" % dbpercent
- if len(dbset) == 0 and len(syncset) == 0:
- pass
- elif not filesonly and \
- len(dbset) > 20 and dbpercent < 50.0 and \
- not repository.testing and not repository.staging:
- logger.error(msg)
- raise Exception(msg)
- elif dbpercent < 75.0:
- logger.warning(msg)
-
- batcher = Batcher(100)
-
- if not filesonly:
- # packages in syncdb and not in database (add to database)
- for p in [x for x in pkgs if x.name in in_sync_not_db]:
- logger.info("Adding package %s", p.name)
- pkg = Package(pkgname=p.name, arch=architecture, repo=repository)
- score = populate_pkg(pkg, p, timestamp=datetime.utcnow())
- batcher.batch_commit(score)
-
- # packages in database and not in syncdb (remove from database)
- in_db_not_sync = dbset - syncset
- for p in in_db_not_sync:
- logger.info("Removing package %s", p)
- dbp = dbdict[p]
- dbp.delete()
- batcher.batch_commit(1)
+ # packages in syncdb and not in database (add to database)
+ for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db):
+ logger.info("Adding package %s", pkg.name)
+ dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository)
+ try:
+ with transaction.commit_on_success():
+ populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow())
+ except IntegrityError:
+ logger.warning("Could not add package %s; "
+ "not fatal if another thread beat us to it.",
+ pkg.name, exc_info=True)
+
+ # packages in database and not in syncdb (remove from database)
+ for pkgname in (dbset - syncset):
+ logger.info("Removing package %s", pkgname)
+ dbpkg = dbdict[pkgname]
+ with transaction.commit_on_success():
+ # no race condition here as long as simultaneous threads both
+ # issue deletes; second delete will be a no-op
+ dbpkg.delete()
# packages in both database and in syncdb (update in database)
pkg_in_both = syncset & dbset
- for p in [x for x in pkgs if x.name in pkg_in_both]:
- logger.debug("Looking for package updates")
- dbp = dbdict[p.name]
+ for pkg in (x for x in pkgs if x.name in pkg_in_both):
+ logger.debug("Checking package %s", pkg.name)
+ dbpkg = dbdict[pkg.name]
timestamp = None
# for a force, we don't want to update the timestamp.
# for a non-force, we don't want to do anything at all.
- if filesonly:
- pass
- elif p.ver == dbp.pkgver and p.rel == dbp.pkgrel \
- and p.epoch == dbp.epoch:
+ if pkg_same_version(pkg, dbpkg):
if not force:
continue
else:
timestamp = datetime.utcnow()
- if filesonly:
- logger.debug("Checking files for package %s", p.name)
- score = populate_files(dbp, p, force=force)
- else:
- logger.info("Updating package %s", p.name)
- score = populate_pkg(dbp, p, force=force, timestamp=timestamp)
+ # The odd select_for_update song and dance here is to ensure
+ # simultaneous updates don't happen on a package, causing
+ # files/depends/all related items to be double-imported.
+ with transaction.commit_on_success():
+ # TODO Django 1.4 select_for_update() will work once released
+ dbpkg = select_pkg_for_update(dbpkg)
+ if pkg_same_version(pkg, dbpkg):
+ logger.debug("Package %s was already updated", pkg.name)
+ continue
+ logger.info("Updating package %s", pkg.name)
+ populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp)
- batcher.batch_commit(score)
+ logger.info('Finished updating arch: %s', archname)
- logger.info('Finished updating Arch: %s', archname)
+
+def filesonly_update(archname, reponame, pkgs, force=False):
+ """
+ Parses a list of packages and updates the packages database accordingly.
+ """
+ logger.info('Updating files for %s (%s)', reponame, archname)
+ dbpkgs = update_common(archname, reponame, pkgs, False)
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
+ dbset = set(dbdict.keys())
+
+ for pkg in (pkg for pkg in pkgs if pkg.name in dbset):
+ dbpkg = dbdict[pkg.name]
+
+ # The odd select_for_update song and dance here is to ensure
+ # simultaneous updates don't happen on a package, causing
+ # files to be double-imported.
+ with transaction.commit_on_success():
+ if not dbpkg.files_last_update or not dbpkg.last_update:
+ pass
+ elif dbpkg.files_last_update > dbpkg.last_update:
+ logger.debug("Files for %s are up to date", pkg.name)
+ continue
+ # TODO Django 1.4 select_for_update() will work once released
+ dbpkg = select_pkg_for_update(dbpkg)
+ logger.debug("Checking files for package %s", pkg.name)
+ populate_files(dbpkg, pkg, force=force)
+
+ logger.info('Finished updating arch: %s', archname)
def parse_info(iofile):
@@ -404,7 +433,7 @@ def parse_info(iofile):
continue
elif line.startswith('%') and line.endswith('%'):
blockname = line[1:-1].lower()
- logger.debug("Parsing package block %s", blockname)
+ logger.log(TRACE, "Parsing package block %s", blockname)
store[blockname] = []
elif blockname:
store[blockname].append(line)
@@ -446,49 +475,64 @@ def parse_repo(repopath):
continue
data_file = repodb.extractfile(tarinfo)
data_file = io.TextIOWrapper(io.BytesIO(data_file.read()),
- encoding='utf=8')
+ encoding='UTF-8')
try:
pkgs[pkgid].populate(parse_info(data_file))
except UnicodeDecodeError:
logger.warn("Could not correctly decode %s, skipping file",
tarinfo.name)
data_file.close()
+ del data_file
- logger.debug("Done parsing file %s", fname)
+ logger.debug("Done parsing file %s/%s", pkgid, fname)
repodb.close()
logger.info("Finished repo parsing, %d total packages", len(pkgs))
return (reponame, pkgs.values())
-def validate_arch(archname):
+def locate_arch(arch):
"Check if arch is valid."
- return Arch.objects.filter(name__iexact=archname).exists()
+ if isinstance(arch, Arch):
+ return arch
+ try:
+ return Arch.objects.get(name__iexact=arch)
+ except Arch.DoesNotExist:
+ raise CommandError(
+ 'Specified architecture %s is not currently known.' % arch)
+
def read_repo(primary_arch, repo_file, options):
"""
Parses repo.db.tar.gz file and returns exit status.
"""
+ # always returns an Arch object, regardless of what is passed in
+ primary_arch = locate_arch(primary_arch)
+ force = options.get('force', False)
+ filesonly = options.get('filesonly', False)
+
repo, packages = parse_repo(repo_file)
# group packages by arch -- to handle noarch stuff
packages_arches = {}
for arch in Arch.objects.filter(agnostic=True):
packages_arches[arch.name] = []
- packages_arches[primary_arch] = []
+ packages_arches[primary_arch.name] = []
for package in packages:
if package.arch in packages_arches:
packages_arches[package.arch].append(package)
else:
# we don't include mis-arched packages
- logger.warning("Package %s arch = %s",
- package.name, package.arch)
+ logger.warning("Package %s arch = %s", package.name, package.arch)
del packages
- logger.info('Starting database updates.')
+ logger.info('Starting database updates for %s.', repo_file)
for arch in sorted(packages_arches.keys()):
- db_update(arch, repo, packages_arches[arch], options)
- logger.info('Finished database updates.')
+ if filesonly:
+ filesonly_update(arch, repo, packages_arches[arch], force)
+ else:
+ db_update(arch, repo, packages_arches[arch], force)
+ logger.info('Finished database updates for %s.', repo_file)
return 0
# vim: set ts=4 sw=4 et:
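The replacement for the old score-based Batcher is a lock-and-recheck pattern: open a short transaction per package, re-SELECT the row under a lock, and skip the import if a concurrent run already handled it. A minimal sketch of that pattern, assuming the Package model, pkg_same_version() and populate_pkg() from this file (on Django 1.4+, select_for_update() would replace the raw query):

    from datetime import datetime

    from django.db import transaction

    from main.models import Package

    def locked_update(dbpkg, pkg):
        with transaction.commit_on_success():
            # take a row lock so two reporead runs cannot import the
            # same files/depends twice for one package
            locked = list(Package.objects.raw(
                'SELECT * FROM packages WHERE id = %s FOR UPDATE',
                [dbpkg.id]))[0]
            if pkg_same_version(pkg, locked):
                return  # another worker beat us to it
            populate_pkg(locked, pkg, timestamp=datetime.utcnow())
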
diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py
new file mode 100755
index 00000000..c74762eb
--- /dev/null
+++ b/devel/management/commands/reporead_inotify.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+"""
+reporead_inotify command
+
+Watches repo.files.tar.gz files for updates and parses them after a short delay
+in order to catch all updates in a single bulk update.
+
+Usage: ./manage.py reporead_inotify [path_template]
+
+Where 'path_template' is an optional path template for finding the
+repo.files.tar.gz files. The form is '/srv/ftp/%(repo)s/os/%(arch)s/', which is
+also the default template if none is specified. While 'repo' is not required to
+be present in the path_template, note that 'arch' is, so reporead can function
+correctly.
+"""
+
+import logging
+import multiprocessing
+import os
+import pyinotify
+import sys
+import threading
+import time
+
+from django.core.management.base import BaseCommand, CommandError
+from django.db import connection
+
+from main.models import Arch, Repo
+from .reporead import read_repo
+
+logging.basicConfig(
+ level=logging.WARNING,
+ format='%(asctime)s -> %(levelname)s: %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ stream=sys.stderr)
+logger = logging.getLogger()
+
+class Command(BaseCommand):
+ help = "Watch database files and run an update when necessary."
+ args = "[path_template]"
+
+ def handle(self, path_template=None, **options):
+ v = int(options.get('verbosity', 0))
+ if v == 0:
+ logger.level = logging.ERROR
+ elif v == 1:
+ logger.level = logging.INFO
+ elif v == 2:
+ logger.level = logging.DEBUG
+
+ if not path_template:
+ path_template = '/srv/ftp/%(repo)s/os/%(arch)s/'
+ self.path_template = path_template
+
+ notifier = self.setup_notifier()
+ logger.info('Entering notifier loop')
+ notifier.loop()
+
+ logger.info('Cancelling remaining threads...')
+ for thread in threading.enumerate():
+ if hasattr(thread, 'cancel'):
+ thread.cancel()
+
+ def setup_notifier(self):
+ '''Set up and configure the inotify machinery and logic.
+ This takes the provided or default path_template and builds a list of
+ directories we need to watch for database updates. It then validates
+ and passes these on to the various pyinotify pieces as necessary and
+ finally builds and returns a notifier object.'''
+ arches = Arch.objects.filter(agnostic=False)
+ repos = Repo.objects.all()
+ arch_path_map = dict((arch, None) for arch in arches)
+ all_paths = set()
+ total_paths = 0
+ for arch in arches:
+ combos = ({ 'repo': repo.name.lower(), 'arch': arch.name }
+ for repo in repos)
+ # take a python format string and generate all unique combinations
+ # of directories from it; using set() ensures we filter it down
+ paths = set(self.path_template % values for values in combos)
+ total_paths += len(paths)
+ all_paths |= paths
+ arch_path_map[arch] = paths
+
+ logger.info('Watching %d total paths', total_paths)
+ logger.debug(all_paths)
+
+ # sanity check: basically ensure every path we created from the
+ # template mapped to only one architecture
+ if total_paths != len(all_paths):
+ raise CommandError('path template did not uniquely '
+ 'determine architecture for each file')
+
+ # this thread is done using the database; all future access is done in
+ # the spawned read_repo() processes, so close the otherwise completely
+ # idle connection.
+ connection.close()
+
+ # A proper atomic replacement of the database as done by rsync is type
+ # IN_MOVED_TO. repo-add/remove will finish with a IN_CLOSE_WRITE.
+ mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
+
+ manager = pyinotify.WatchManager()
+ for name in all_paths:
+ manager.add_watch(name, mask)
+
+ handler = EventHandler(arch_paths=arch_path_map)
+ return pyinotify.Notifier(manager, handler)
+
+
+class Database(object):
+ '''An object representing a pacman database on the filesystem. It stores
+ various bits of metadata and state representing the file path, when we last
+ updated, how long our delay is before performing the update, whether we are
+ updating now, etc.'''
+ def __init__(self, arch, path, delay=60.0, nice=3):
+ self.arch = arch
+ self.path = path
+ self.delay = delay
+ self.nice = nice
+ self.mtime = None
+ self.last_import = None
+ self.update_thread = None
+ self.updating = False
+ self.run_again = False
+ self.lock = threading.Lock()
+
+ def _start_update_countdown(self):
+ self.update_thread = threading.Timer(self.delay, self.update)
+ logger.info('Starting %.1f second countdown to update %s',
+ self.delay, self.path)
+ self.update_thread.start()
+
+ def queue_for_update(self, mtime):
+ logger.debug('Queueing database %s...', self.path)
+ with self.lock:
+ self.mtime = mtime
+ if self.updating:
+ # store the fact that we will need to run it again
+ self.run_again = True
+ return
+ if self.update_thread:
+ self.update_thread.cancel()
+ self.update_thread = None
+ self._start_update_countdown()
+
+ def update(self):
+ logger.debug('Updating database %s...', self.path)
+ with self.lock:
+ self.last_import = time.time()
+ self.updating = True
+
+ try:
+ # invoke reporead's primary method. we do this in a separate
+ # process for memory conservation purposes; these processes grow
+ # rather large so it is best to free up the memory ASAP.
+ def run():
+ if self.nice != 0:
+ os.nice(self.nice)
+ read_repo(self.arch, self.path, {})
+
+ process = multiprocessing.Process(target=run)
+ process.start()
+ process.join()
+ finally:
+ logger.debug('Done updating database %s.', self.path)
+ with self.lock:
+ self.update_thread = None
+ self.updating = False
+ if self.run_again:
+ self.run_again = False
+ self._start_update_countdown()
+
+
+class EventHandler(pyinotify.ProcessEvent):
+ '''Our main event handler which listens for database change events. Because
+ we are watching the whole directory, we filter down and only look at those
+ events dealing with files databases.'''
+
+ def my_init(self, **kwargs):
+ self.databases = {}
+ self.arch_lookup = {}
+
+ # we really want a single path to arch mapping, so massage the data
+ arch_paths = kwargs['arch_paths']
+ for arch, paths in arch_paths.items():
+ self.arch_lookup.update((path.rstrip('/'), arch) for path in paths)
+
+ def process_default(self, event):
+ '''Primary event processing function which kicks off reporead timer
+ threads if a files database was updated.'''
+ if not event.name:
+ return
+ # screen to only the files we care about
+ if event.name.endswith('.files.tar.gz'):
+ path = event.pathname
+ stat = os.stat(path)
+ database = self.databases.get(path, None)
+ if database is None:
+ arch = self.arch_lookup.get(event.path, None)
+ if arch is None:
+ logger.warning(
+ 'Could not determine arch for %s, skipping update',
+ path)
+ return
+ database = Database(arch, path)
+ self.databases[path] = database
+ database.queue_for_update(stat.st_mtime)
+
+
+# vim: set ts=4 sw=4 et:
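Database.queue_for_update() is effectively a debounce: every filesystem event restarts a countdown, so a burst of rsync writes produces one import rather than many. The same trick, stripped of the pyinotify and run_again bookkeeping (the callback here is a placeholder):

    import threading

    class Debouncer(object):
        '''Run `callback` once, `delay` seconds after the latest poke().'''
        def __init__(self, callback, delay=60.0):
            self.callback = callback
            self.delay = delay
            self.timer = None
            self.lock = threading.Lock()

        def poke(self):
            with self.lock:
                if self.timer is not None:
                    # a newer event supersedes the pending one
                    self.timer.cancel()
                self.timer = threading.Timer(self.delay, self.callback)
                self.timer.start()

    # e.g. call poke() from every inotify event; the callback runs once,
    # 60 seconds after the last write settles
    debouncer = Debouncer(lambda: None, delay=60.0)
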
diff --git a/devel/migrations/0001_initial.py b/devel/migrations/0001_initial.py
new file mode 100644
index 00000000..c28fc20f
--- /dev/null
+++ b/devel/migrations/0001_initial.py
@@ -0,0 +1,18 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ pass
+
+
+ def backwards(self, orm):
+ pass
+
+
+ models = {}
+
+ complete_apps = ['devel']
diff --git a/devel/migrations/0002_auto__add_masterkey.py b/devel/migrations/0002_auto__add_masterkey.py
new file mode 100644
index 00000000..ac1f745a
--- /dev/null
+++ b/devel/migrations/0002_auto__add_masterkey.py
@@ -0,0 +1,76 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ depends_on = (
+ ("main", "0051_auto__chg_field_userprofile_pgp_key"),
+ )
+
+ def forwards(self, orm):
+ # Adding model 'MasterKey'
+ db.create_table('devel_masterkey', (
+ ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_owner', to=orm['auth.User'])),
+ ('revoker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_revoker', to=orm['auth.User'])),
+ ('pgp_key', self.gf('main.fields.PGPKeyField')(max_length=40)),
+ ('created', self.gf('django.db.models.fields.DateTimeField')()),
+ ('revoked', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
+ ))
+ db.send_create_signal('devel', ['MasterKey'])
+
+ def backwards(self, orm):
+ db.delete_table('devel_masterkey')
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'devel.masterkey': {
+ 'Meta': {'object_name': 'MasterKey'},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_owner'", 'to': "orm['auth.User']"}),
+ 'pgp_key': ('main.fields.PGPKeyField', [], {'max_length': '40'}),
+ 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'revoker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_revoker'", 'to': "orm['auth.User']"})
+ }
+ }
+
+ complete_apps = ['devel']
diff --git a/devel/migrations/__init__.py b/devel/migrations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/devel/migrations/__init__.py
diff --git a/devel/models.py b/devel/models.py
index e69de29b..f31b8fbb 100644
--- a/devel/models.py
+++ b/devel/models.py
@@ -0,0 +1,20 @@
+from django.db import models
+from django.contrib.auth.models import User
+
+from main.fields import PGPKeyField
+
+
+class MasterKey(models.Model):
+ owner = models.ForeignKey(User, related_name='masterkey_owner',
+ help_text="The developer holding this master key")
+ revoker = models.ForeignKey(User, related_name='masterkey_revoker',
+ help_text="The developer holding the revocation certificate")
+ pgp_key = PGPKeyField(max_length=40, verbose_name="PGP key fingerprint",
+ help_text="consists of 40 hex digits; use `gpg --fingerprint`")
+ created = models.DateTimeField()
+ revoked = models.DateTimeField(null=True, blank=True)
+
+ class Meta:
+ ordering = ('created',)
+
+# vim: set ts=4 sw=4 et:
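Since MasterKey points at User twice, the explicit related_name values are what keep the reverse accessors on User from clashing. A hypothetical query sketch (the username is made up):

    from django.contrib.auth.models import User
    from devel.models import MasterKey

    dev = User.objects.get(username='somedev')
    owned = dev.masterkey_owner.all()        # keys this developer holds
    revocable = dev.masterkey_revoker.all()  # keys they can revoke

    # keys still in service, oldest first thanks to Meta.ordering
    active = MasterKey.objects.filter(revoked__isnull=True)
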
diff --git a/devel/utils.py b/devel/utils.py
index d7a154a8..62b12cd5 100644
--- a/devel/utils.py
+++ b/devel/utils.py
@@ -47,6 +47,7 @@ SELECT pr.user_id, COUNT(*), COUNT(p.flag_date)
class UserFinder(object):
def __init__(self):
self.cache = {}
+ self.username_cache = {}
@staticmethod
def user_email(name, email):
@@ -111,7 +112,22 @@ class UserFinder(object):
self.cache[userstring] = user
return user
+ def find_by_username(self, username):
+ if not username:
+ return None
+ if username in self.username_cache:
+ return self.username_cache[username]
+
+ try:
+ user = User.objects.get(username=username)
+ except User.DoesNotExist:
+ user = None
+
+ self.username_cache[username] = user
+ return user
+
def clear_cache(self):
self.cache = {}
+ self.username_cache = {}
# vim: set ts=4 sw=4 et:
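Note that find_by_username() caches misses as well as hits: a None result is stored in username_cache, so repeated lookups of an unknown packager cost a single query in total. A brief usage sketch (usernames are hypothetical):

    from devel.utils import UserFinder

    finder = UserFinder()
    user = finder.find_by_username('somedev')   # one SELECT, result cached
    again = finder.find_by_username('somedev')  # served from username_cache
    ghost = finder.find_by_username('nobody')   # miss; None is cached too
    finder.clear_cache()                        # drops both caches
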
diff --git a/devel/views.py b/devel/views.py
index 27c32e7b..b9bd7cce 100644
--- a/devel/views.py
+++ b/devel/views.py
@@ -18,6 +18,7 @@ from main.models import Package, PackageDepend, PackageFile, TodolistPkg
from main.models import Arch, Repo
from main.models import UserProfile
from packages.models import PackageRelation
+from packages.utils import get_signoff_groups
from todolists.utils import get_annotated_todolists
from .utils import get_annotated_maintainers
@@ -31,7 +32,12 @@ from string import ascii_letters, digits
@never_cache
def index(request):
'''the developer dashboard'''
- inner_q = PackageRelation.objects.filter(user=request.user).values('pkgbase')
+ if request.user.is_authenticated():
+ inner_q = PackageRelation.objects.filter(user=request.user)
+ else:
+ inner_q = PackageRelation.objects.none()
+ inner_q = inner_q.values('pkgbase')
+
flagged = Package.objects.normal().filter(
flag_date__isnull=False, pkgbase__in=inner_q).order_by('pkgname')
@@ -43,6 +49,9 @@ def index(request):
todolists = get_annotated_todolists()
todolists = [todolist for todolist in todolists if todolist.incomplete_count > 0]
+ signoffs = sorted(get_signoff_groups(user=request.user),
+ key=operator.attrgetter('pkgbase'))
+
maintainers = get_annotated_maintainers()
maintained = PackageRelation.objects.filter(
@@ -65,6 +74,7 @@ def index(request):
'orphan': orphan,
'flagged' : flagged,
'todopkgs' : todopkgs,
+ 'signoffs': signoffs
}
return direct_to_template(request, 'devel/index.html', page_dict)
@@ -73,11 +83,11 @@ def index(request):
@never_cache
def clock(request):
devs = User.objects.filter(is_active=True).order_by(
- 'username').select_related('userprofile')
+ 'first_name', 'last_name').select_related('userprofile')
- # now annotate each dev object with their current time
now = datetime.now()
utc_now = datetime.utcnow().replace(tzinfo=pytz.utc)
+ # now annotate each dev object with their current time
for dev in devs:
tz = pytz.timezone(dev.userprofile.time_zone)
dev.current_time = utc_now.astimezone(tz)
@@ -142,12 +152,12 @@ def report(request, report, username=None):
if report == 'old':
title = 'Packages last built more than two years ago'
- cutoff = datetime.now() - timedelta(days=365 * 2)
+ cutoff = datetime.utcnow() - timedelta(days=365 * 2)
packages = packages.filter(
build_date__lt=cutoff).order_by('build_date')
elif report == 'long-out-of-date':
title = 'Packages marked out-of-date more than 90 days ago'
- cutoff = datetime.now() - timedelta(days=90)
+ cutoff = datetime.utcnow() - timedelta(days=90)
packages = packages.filter(
flag_date__lt=cutoff).order_by('flag_date')
elif report == 'big':
@@ -282,7 +292,7 @@ class NewUserForm(forms.ModelForm):
send_mail("Your new parabolaweb account",
template.render(ctx),
- 'Parabola <dev@list.parabolagnulinux.org>',
+ 'Parabola <dev@lists.parabolagnulinux.org>',
[user.email],
fail_silently=False)
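
The dashboard guard above works because an empty queryset still composes with the later pkgbase__in filter, and the signoff groups are plain objects sorted in Python rather than SQL. A sketch of both idioms, assuming get_signoff_groups() from packages.utils as imported above:

    import operator

    from packages.models import PackageRelation
    from packages.utils import get_signoff_groups

    def maintained_pkgbases(user):
        # .none() keeps the queryset chain valid for anonymous visitors
        if user.is_authenticated():
            relations = PackageRelation.objects.filter(user=user)
        else:
            relations = PackageRelation.objects.none()
        return relations.values('pkgbase')

    def sorted_signoffs(user):
        # signoff groups aren't models, so sort with attrgetter
        return sorted(get_signoff_groups(user=user),
                key=operator.attrgetter('pkgbase'))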